[Unrecoverable binary content removed: a tar archive whose headers list the members var/home/core/zuul-output/ (directory, owner core), var/home/core/zuul-output/logs/ (directory, owner core), and var/home/core/zuul-output/logs/kubelet.log.gz (gzip-compressed kubelet log). The compressed payload cannot be rendered as text.]
<Ԥx!Bn.r96\r-ɵoZtH+^3F/..*0Of>G u 7G GhɟtVstyWX^NTɔ¥&~Y)e&[5ϙ`tv9s#ٟ6>+PqRvidcQ0RJRD Fݙ˞)K6W·w] lb ht4ףF> U=ݼ6+xjMq Ò#8*r)APOC` SC14M-Xhm 3hQ9b0XJB@GCfPeY'sXf1#,-Tk"v~&t撔0Kyi,eʢ"9@"GB9DĄtH I( hJR{wMr{ν} 1{F}0UIAn5U7U8ʮ 5Q]ye"T>nJP*]֥WWtRf5v٬*%r޼U{i7Y7yj~Hoy}㷫:1o%[Mk*nZÓ[}VxdWuF/]j~bo[A3Ft){&V@@1' zEQhM* ^,I8;8޲Ie]  fi8gYL2PKƸ/b)*U w޳csKAL1 r̽Zr@XSc$FTsLV!Ut\G%28Ed :u ap)1DLI8FҎY | \K,F>wl;M`:1Cct=|fqzM8dM6K%=_B5l*7q,0VGQes(< V{d1F#8(hZ Ek$ ÃI[u;ͺ@wD@4:&(gEwZI%`APVdb}cP) ~5DX&쏵i 4BcƔ&OB F[Ai!ZnAEX,ܲ^xs-?ִB\`䥠lr@e`M qYP{,aw*E.mR 4HiYa:kQHGHJ,TsP 'o\lb^AXׁgd wVwPcկ ׹:⫟Ƿ/Oyӫӗ߽=:o_}UxX()R3h/3q*WӘӓ7hӜC^iqN-sJ_zv.炯lt48E'$?nvB TW4IFaPZK!?Gf&~~G b)U iQI`Fv3Qp3`wo?G)gB3i]m|[_'^_߲V0 澞΢X??$b] __ܪcB;.U|3; M;mTt<&זYwz>|z)E߿]܇hS>rԚLw3N2ODL0"܇ʂpYj^wo ;NcU|f|Wz60.n97A+#$?<^_SnuQOAx *9hU\1bKcI``IL X|U|y=:o̢Q9su+4M/3q)0%գ7B;nLjH)Ɉk͖K$^0`nd*N jL]jI{w|{5?.GNDy^mSfNvz' ?9@3L mB wEH(23(ñ5A&H‚Fdnc[:Z|Y-χd bz_ɞlyl}V)-@1ʔt'NQNT@)ˁ0ȾK]ksFcމIq x}vgֶJ~?mr{֠ڏ'X݇gVv%g#4<0Km*CA ƙARL N+R;}H"3dtW@ȁYL dbsb% 61N{ޓPgR+b #-? >9 _K(GRbY"oAs*GnV9"GSbQY M w:\8iC(e ʼF,sw+G P2f2'Ȟ[[|lfH.>NjONө( UD(TuEbAk: ;R%4UDb$U ho_iFS~ȚӿV<bP77zdX-LZ-6m43vhsޓ8!dx&m~YJz]߅mᐏ{{(;D6ޮ竨!{GXRJ׹V%)I'q܎s,~.eg5Тɻ qBbtc8 k!Gaeo؛}cVz5q7Kwo7/AN}ޯ= tޥn\q%Qn!q`.~0,蔭5!jw £ 0,倡:-znf&0ּz&3d5KVGG-)uJ?zt돊8}a-m7w2=[-XlS!%9B$zkB-kLFdyu[gz!O2 ̥˨TrT}2 "d}v WSl<$O /3 (]/V+}8FnGOu1JbauU;4VdؔK 7-Q?[?D~DޕA fԽHn 񿇑39@\xwЧpAwt 9KCJdz#W2Attf {cosgSh8f =Tx/?Om&Y?:|=iwҧf g|&D7XK˽qn>;;Zr<ޝ9P|dpUZN hl ǴNz&i9 :# nNqoߙ 5bˇm7'eF\nFt"]!BsU[fIy}'⚋\s~Z5=לiqoy=BJt:v Ŕq7s4cHvʮ]@oSIN7;+ٟ;|j4#/|Xd;MDw,Ev_.[ng̟Zu9>V +L^OZ?0_O>~o~ߟ37YZ{VbyVfxq,输 wwmGr朗}"%L/M>Pl]C/6MMoA>oJE٢}:amvk>D}{-{.9O[} n޸YxwcnuZx'jPY}χO}vѦW08W07gW%<\ʽs|~j1![)'-4ۖ˫1bo~;g6fT?Ce|t&Ϳm\-C4Tr&c.zwqs'j}z|A7aګ$[ȑ!oR_ɻSJ^arK!}fJnDwsP>fe`\UPd=OhE q 恹0,q40qv6nW?fe2gi0}ݿ\s/HRwF!V6Wm׍xV b'm teq^+?x}W"ΙT"6̶>'3BȍlkqJ&6}}6͉-nɡgk;ŖYJ*qu$~_0!} 1e i.f9Xē ϚŋDŒ` əaB7dS [kx f5x Mk@]05v8h[H[Gvi d)[0(VOB& R-ϰdrxc0ZG.>8;Y RW9t(qx yV8Fl s:e,qCYgEe\.3yG\Hn4gmUЁ˾'PQkI!#Ҽ> [mj`N$qlٻނV*8)cq^bNqNc [pWO*]"er@&tݮw 6 !\αZ@ hWl;F͠j`o]KmLf A&-'@ X"Y0(3꾵Y7w43b+|J2\W"tX,p$`&ĎI\$v#)dMpPRp 3D-he*AΛ2a *Vz!Ԓ@b8+Fvȿ<`+DudDw"=@/:k? A獺 U{YF&Wge] %P.-S#jO5jtz_TB$Ed8$ws[@ e >PAniezAZM¦L"[@NtrcQKLAZT1@PшR;B\7.wf`⃕}Μ73-x M~Fwm6Nf B$a1ཀ .MH` dUɒ|A餄6R4i*3h+i<) 8m,G/XB zTx$Q"92DC01G‮tT5᫮&[O`^txw 3pMBY,TGwCKr=Wwl:v&Be]Nb; >m ƷxI%IpLU"tF$4V=3إf#Ŭ:"D ZsMZNFt ((L(۽Xsv,Vr,@br) n '$ [K %aNKJRY9#$ƌkNk< 9LgPEV\"8ά 2 Y#FJk\@`?4 N7 כ7 Ҭ4,j)lJf< :pŸ?~VWHgY gWNSLj$,Lo;PJѰޚ"ƂZfBUvPf| Zχn䓷%Hk>kj\B66-k w^y>=Ѳ>\߷oӯ6iׇGs*< D#x`K@3^#܆r0*0{s#f1k"NR ΋E%a[6<)lp|YiFf@!(!A/{s+6ds!1IE?- A,'3 2X$ $'"K3@Olr<X7fE Th "8$`u[rntrwp=L 9B|'Fʟu7E7LNT>pNe@cdvb(#YhĀ鬧{ҁ̺=76:iy̦'_19}} "fjfz`[Ӵ3dd{=Bd2K+\UćTC~\Z-kPiRvmN_7fGz*J6ljg Lp rLAZppM__`eq\lVpJ7a"I vV9Ǯ9%liVAҔD|9Idj"`փM氂H*0ėis' \hI(fW_ກ銇@QK!j)o0P5wzwq^i,A1½Ph+\h vץ:Wp~iHV![y+ rs-p$@H! $@H! $@H! $@H! $@H! $@H! $#$8ϢO$'}+n&OV! I )AI $BI $BI $BI $BI $BI $BI $BI $BI $BI $BI $BIGL)FEH X@ 4!@Z#*AH=JH3pBI $BI $BI $BI $BI $BI $BI $BI $BI $BI $BI $B@0r'@ &!@Z#eRIHi%@H! $@H! $@H! $@H! $@H! $@H! $@H! $@H! $@H! $@H! $@H! 
$@H=z0lnɋqjZJ]oO W@CJqx~ +.A,+z.R\e2ң.5=-[we-Mτ,m80 IE)&IC1y7]Fn*-lEPě: 5h`ˮYyED%dRU܉Xy8FϞ.YSDU/ck zξ:|X~v,_-$:žvuڎ-^ 2y "\Mj7^ժ) lU><]~5Z`"O$?!| L'&id;ĜPZ0BmA-{荋 Zbiϫ)Ϋ>FUR.u"jXLq싹*~D] #4W*-EUޘ+אެZi۸\)Bs͕DJ#s+sUĥ/H{gE\)As͕jy,ZU "%c,2WE`ݟ"}1W ])1|#5=#GDpIW6 ػa>҃2WohnTp~un .RqZ']̡ }0Jo˔ˢ+:J!IlNժdYN\l:U(UoJ{v.}1ZrA%U5VkZO]Z; 8 r~hfnwAp),gM…]R 6ؓEFdL[)Ru4w[øz JͪHr^׉w~ =mlٶ[Cn4#/Fp@g4Qoy:iy|4'Dgϧi٣OB5vRMʸT).ڲ|鰚cf\c9dϹ4e2=`IA4$P<3XTR έ.YfpjY+SK{Zc .qjM>MA!ڏfMo~BSig_=^}~=Ffpx]ޖ~ve>`ϧ&Fȫ-#xJgkCfټ'Em=2\zk]8'Tq;^u; l^ Jg{ S$I"]u.1#\ؘ}_RWMCugdL=wnz<ݵYg)+HYںJΫg{伖r>L'-ncяVu:J{_q 8/GWh8Ά/;)&Fnݪ("W1l!Mu1n)a!fǗю!,!,-;V J[*~/yue#(]I> v|n56O'׉nzG^yPi6hĥבD<$Cbf!$6ׂ%qlsĈ6~`}Kbp~:8RNܕ>^M `_nJuUӝܦrI.8rM#UUo%}UK{7_5SxHyH|V8ʻwtYYW_;h\(Gu.1-Sl 2REyMw'xPLB7[kJ{,OȲt a$)ǷCEġ{=*^3k7h4> D3Vm38Ϊʺrc10!4F3},Z/([VuoSKP>x]z#.{9zg~FY0mn?\pv\[hiWrRm}&e iu<۽s^ sn[ܫ _`yZi~↿ټhجr#4h,Oܥ+f ;*Wxwrjo|sɺ$꫑D"fP KD+'B5KIC>ˠ)K|tSwrw3qli6EYjy֋fPߧ-?QZ yDa\FurNahy8l:| qua+H߮M5tqE=ʝO4k]?g~~ˁѦ ]bDSGXbjI,S/@"OI(HriwZG]G$+C+OgQAq@\=3qϠmyճ/ak昢r="K*=NCK&غ+#9izA)sF:Z%*r%V˧A`YmeXM%%&.]Z)CTQgiF59S) 2$h#n`fU .ax}L#m<.AK$QIł,!ϚI'H32z=j)3$HK$DRJ4jn7rtO:H={3_ í7Ƥ}].-DXV i6*υg xNRJ$R@T8-½X˗[4 h>apܕ8R !ǂL)X6F9!K84Iɻu)Hcaڨ5g:'gJ:S J@'@֮v&-f 9~nViw3ǮM^Lu)?nDl; 5~R=a0FǃMIK1hNr>L ދr*3O#-fy 2"CyJh₠i3K8LA'Dw݅Qח]1l9o˲<')gfU[fE25YA);C@:}Y/[GCqFNQב_/<_ˆ&|(NX\8JcP@Di-ZȲ+,%^Sci%kv'k]o]ɽ(v8ΟGG~IFKAj`Yhca`4EIucViAItF6AjnɾYMm:?CmUaKRἮ=s=%1_lV \< .%0UTXy#B9qYծpv'2â@"yvՇmq/;|q}lGfnҷr '&<+d)QI 'YC \SYeA'. xAȅwϹL*&&GmiDѱޙ8{bK=? q"ڵŋ"0T0B&Ӱp%Gg~,rt0.kX$!e׊ɬ1n!:C˱sL#DCdY[E cpIkTat>+_2*o!"i֒"A2'Bk4zDr _R\>D>nk}E w >?pgGu`-SB)i*Y4 m(w %C?>jHQ*JV[f2OЧʦ"Z_R T=yndVؑHU1iE.P[h-gL#4)Rq+%< ʤnϖ+)Ife/ǯ//fi0Ƀy<ó4|g~yͫ+hWxg BG /x,^mz5eӿ=-㽒wY-{꽵1 K.oxqv8 e-bֺ+S@y˰tnq;Ook˅6R)XRGfk qӠG~6UmONoώ?SBEI^ޤm8Zٻ6$ ،Տ6=_IF?e%R&)J}g/ԃJCk v8ꮮr>&kBkOy8zq8zuk(Rx Zws,I?ymLLo 7|7^ bwlVWب;OT:VӻdWdKBq:;?=d>}$&6!}kws!eQp(3J`en8Fq^j.^?mTޫŭxd}LtDz_4%NjI^'m=,)j?_ %L5>=y6,w/&^/zy+?ռ>Z5_g5"(s[2~r -Oj,*bR.V:kZ1ayM@5 xS7Qsxz}y|PuF@>}ϼӆŋA֣]%g_mn͖'N3-Im`3HFI˜|:UvI jtzH{W:,.vL;]J3!*gH2: G"㠕wLz% r+7ZPTWk:Qr^m{mNvN]ktZة11j}ȝMX1x_=(_͢a5꿯Ш|˫w_]]gVsw.ᛓ, Gxci9I)J E%̫W =jeg$$,pUQu}?TBCrGM\Uܺz:;y-Oڼ2 xWT=G͕Ljz3`\~x*G.^^wf'_zc3QG/ˣ`͇^&Ϊ4 ^UnǓB'WVDTu @D颶 @8e}.m3.LumUw떵mDpl JUNw"TuX0hjaƦVݺHJ=!&ji.*`7  F8ЩNݺĹGr;w|& Tm;Y9py^; -Usr읹q`wƎpo] (&D"93[JJ¿E#BOZ*][rG#G@| h4 /)$)=V8fcd>jxʹSΈMLOye}ڈgioā 3:&5A2@h!Ry<pӷ(}}'7 ao߈CӴr_Rʇ]Ѱ^J!oPlaWvf{/ƟNWxSsnlbQ_~xꀩ:{鐑<O;On8)V)(usSf*+R-,-#T't™21T[T6F9Y$(H9w%X#KcC*7dS*~dw;lѣ;Y (ךY +q$ZQ l(ݛJ\/VYZN`X=+nJɥ7?zV6/<\{KL+jD-h)xY3\=Mv&Rwnek1dyYSM r\k<ڋHr9F~CI3Z-F1d!YU8 [%f4%9>Q$AKg\p^CQRţ=iكs5qcġv1.u%ly=.ޚo2Kz?J#9Gĸ4{(b2Z*Rw]ִcS>*vVdtpgG>%b\b<6sdTS{Gů.Ahyoiv;y(6_tuq[VUjPz}4z"C8 X3領}yZD]/bi reXg+(Ң,4kk0c.O[p36SNVUѥN=z"J%8:Ad<]Ƕ-AmI2"G(褊2NR!۲sUlN!Fl]_Uˠ\H5-e*O LP_&S ZdN( V: Y+p2R DLvQV5r֗׫bfdMƈw#Ztˬ qkLHڅhp ᙑVHu /;K=44f {iiؕu[Mt+ٗdvk:]ZX\ ׅqo/P/]/l^G^0Uqo: eR#%7. TvT G?)n4`1MVMsD|_@!2ԩTiUҒwG+|^ٮG'ߕC6'Y3.8E`UH IWi!- Gv>]leF;{Sr>m֮Z[ڭv65{Y1) DUCOuu}j(j(^< SgCp  \eq, aYJ.{zpe:8#/ԯ:\eqҪγ,ہ+zϡ/>9\=L`O WWIäԼ[pEWMAP`do*+J+w=\e)pJTm.cm83RόgF3#qTZu>3RόgF3#H}f>3RS|3#Dvk*Zi"۽H؎FOdbZ[W0k F4]oɑW|IY=or C5~ZeR&)j&/C!yKfUuu2MEIn-ЙFdsZ~h.r=HU&>Z3wMbfq`z.xSvzݕkWާIk UzfVI)}Ϊʺ3Н2|V6Sׅqu-GNd9<9n:Ғ},5 3N +wCB*+ 5:1 w^T,O`;~d37~!$&uHҕa a8T:dSF|jn&]֥2Y[ _ 5Ƅ,-Lfug$w<`8D݉=:13oٷv"8?7 8mA.\.P:r'Czd 0ŘX R\++~xRKN/.//=[2 $A'8JB.hȑ˒*HJ'da|Rsx5Q\#nGu'{:؊m>X^C=oNAdѦ. c" $Z{TV F2%PTX:v=嘽Ϊd9W:,c}> LNk-%u4Zֺ7ǣpǛ)ctRE43iȍ(S&uj\b0I$겗`#_MqއDU$KU}0^'9;- ]3t]ˈQ̄q. 
%h!0MYZJ1ڐbp{@:%-Mf6KOMyo9@"3j]g.s.+Ee5i`LFGUWK+QE>n| ߣŖp~ET Ll7KgNNAW `VYN9 CpNGH9X?U1RTIr0V@d9kvx^KsGq?{YzO@i=|4G/SRƭIǠ9H#<пd ,DjQ`@ DL Ԛ@B(Aā8Ĥbh5meZ>%m;{N :Rx +>F'}R f~pŸ~I&ZWK ߿jXm*qW15CC ^<7G9 c2kZϭ#iIk$rRY(Uk 6\:&X&fɴQ}),j%I&*D¼7 iWq1p~o5Na׽пN۫ǿ_~ߋ(fbٙ7mj sߔheCnhv]ZF)~w}Jp7Ï%FWL E7K b/?B*6Ji&7G4rw{$&FWK4ϵY_%\ެjyz9{׎LZe~\Mp$e,NO޶Zp'MK8%.FrpJ0&мaaφ]蛥xzhxAŒ80CAX}hr҆ZŻ* i76kfO,hŕQ+؄lHgqȽK$ j|❂vjKWS풬fJḾJ Re˝`z LB 4jDJvJ(Fhb4q;1<:))Kvgi}֡J~cq=`ŋj|;XS+^u=xU _V@l t=FJ|ROh8$hS(% T~Xv-?aaQ>)B|?m@3`Xd\ LZr]yJzyC`J#nIrt\, ҁhLbA  rhwܝ}.(4hjlƶ(/;Px&P(hhmQ^m|Z}ɷRF$_+dp°d1vਥGH 1xeJ}9ی*$ڙ31x^DHvmg_ok"@Q穌A@i?ӌŇtae9 &EXPBP*$ӉY>SXiLեoykBz4S!Ajӥng Ůi9mߜIZ$%o\G,jsehz߂9Aǫ!Wez2kσ tKU|j#At`\bV0 8+ \V$oJu W7/.oFBtI8﷟o'rەič϶EKOƧ;ɅD΀J Ȍs , EFOs9Ș$Q$B; ']L*%Lg9BR{fCFCv^9EO;g8\N4N4cX$ Cy\\.9 o|Lt"vS 7?D#̒ cE6Ydrv)#W9gnl]Xgs;o/0812:h}G]"PԶf;8O'[KmuUЋ: J4zZC.X$Э֣9IJ=r'L(Y"މ3ҧ&(p$jw-8~JcYC ,JR$3F"OFE0^I2H( 6 ԇ 2sͭ$:y"aJlSdF˶(#BS`,9E/WcD',$Y>*ArEM0 ҂2q(͒_&ߗk?Lx6EƠxs 2d`j 3oJF[ c9P9LIW$p H ]Pgj{F_n]on^&܇-~HIv7~դ$Id$CRjVw=ЦNM˛:=&4y h&&L~G<:cFZU h-Ń 4ƩխΊ\H:lYJ_}R#cVfʒ]Zs7"n؝WeGů}>\'ߞvIf8JGc[.trQ=n\'m7;+E Ӈ@#ʢ/[rUY)h-`Qu>ͭD;s/,Nȃd"0H`A(kr/ҥ,48"gMhVUNɥdoM)zcp[ȌGg9rJ{ZV|歔ݥ=?o/ȅIq#WQ(0%Y:##X.'eQOQ\Uّi+U+lCD!o˿br"c(Res^Lh*{[pv;j:ŸI~?6_u%/5;7/Zv$/z]OGۨdsn_K6Nd-w.TlWeg|uO;#dR!Zi1+Gt%cφs->dhn9=+%y_.zR0HD~]>:EUv\DRm3ckpzךVioH[/PnX_\eeaMs^jo.x<88ckgZIDN':!0%Ss%QMb0,~0{. S[hAaF)cey5ڄV2 ڴ-8O'q# kfm푵 MD XMEQp`18*9w^!k\To$=KR)4qML,("HE\Iޏ,tIڹ58W 1v[=v##n(fcxz L 43Һ$9h$SֳCڭc gLi8ETF  "i%m/j nFkċՎe]lJE2/‘x3.X"HW袍I13a*9.ֱFG^\bW5ؗu||P؎m8E1rl`/>x&vC1Fُߕ|CH-O)Lg@,`%/?6K^ _ӑۋLM1C>LgW&b}pߔ,Y: ؾo~xyZâ]> iA~Q#S Yy@\]Y-nY烾6g$@:P֦E5jJs_7_ toM#٪8[T57,NC_"ftyMM:juv0fq1= àb ]:||@8^PgK`)5mϽ^s{DWX ]R Z]Ҙ#]=A2ѕ) 7tEp- ]Bu J8S++}  ᚾUA+;%EGfJܱsjK^{luu?UJ܃đz.#*lb/tU u*( UI7tUUAp8]J 1GtWѫt#]=E/WѸ;s7~:.c|1ɋI|N $8~L'=Vq8ʍ/WLC$HJ[lxȋ iҋő;LQ_Wt~V{̫^6/wK'8ի:[T(B<ߏ鿖[ h4o'Z4r_~xb(s²sF8 VYȒ2.N^F^nP*:*bJ]cdsbFo_EGqR}],G""Cw\35`LBuxmԒi`LB`ZbN&+2zhcv ?7,ï^T39;|6^4,u{dJS f/?l0.K#ulbYqQq(XA}*iorxۛZ(eB /S CH>PPSZ^֌j/tU:]Gzt0ڝJH i.L?PV@pCF$Я~Ob1=WW }6f.QzMOLF]dGf.`ɍ[.-Ƭܻ<5(Ms~z1] bynyNS),d_ p6*OS< IEQHnmN5;:T[ƎjkiTJOk~pl^ecE1B%㍅Pu b\]B/,`'~).wn5wo gkZ[ts9m*4OO"GR ՟ȲVGlÎR J5]=@oI $w>r#]}2ѕ){g"/tUb3 Jy{teUI]` oEW.}: xTW]=~%<:] d~p#>5lTˊ{j߮皩BEBWV!:]JmHWOefc3΃,08wy1K:Yґů/`پ|Qaz9g}q:w,02:9_YP9ETdgxf'9=nx`K)Ip-c}fa)un& $7K.,QیBD(_+-u^WYq6ͼ*uNZc٤-9ekR^ BS*[eY&'Cg/K;ocʏ1VS!E.VAfB2S~2OWYL/{RdN[q;4.I=vR;4-Mj61Zs>er@T,ȃiET1RD(`RV:+R#" .u$ز^gRqʘKQM^VA-ϊg$n0tgRҏEpmq|z]#`~QH.Ukl|TUi>>ōq2IG受ksM['u-5 >$&Jds[MWbzܸ r7yhAuhdBWY%WHA#hTkSν:-#F 8# ƸIsJi 9hbDG=:uJ.%%0xmJ#rEf8:sȑgW ղl 3oP'e~MF׌{OGIq#WvU Sڙ`H hlSD@1ת&d f< ې$Q[򯘜<$ĩ Լ^kfBˎݎJz5#YJُ&p~ui_rEoV}=;Yen^n&Kzcp>N7y1d1:QIOo{[ܝlrQaӯrynǝJ<>L7YOYy\#}!2zMðY9 ,}6n) ]Dt̨1fY2pAԓ F$R49)̹Df!5fri  ׈ʻ-.2>2dhw\_b<ǣglML+4 )0Dg8&Ӽj6 IsF[%3O2R3`E2&(¨:SlnjmBrHm݌bv[}Y۶G{+Q*8rg%CsLgȍ2/Ip6Yd!m$?ĻX༆Xg$6p{Xy- V7bbdN+ڈÌJ RQ rI,:IĂT/+.I;ײ>l:aM2E#&ԈՈuӈ$1xB⭞WVxsKșVXN XϒsN*%ZՈpV<7NAJs\>TR\dB & 0H.C ۲Fl:5EGdr^gkRXZ֋Ӌ^\'c؏u*" !P6 ]1W$30JJc: ]cчIcnY>rs*lgu^/^#711:c3q #@֏h Ɵug1 cȭ-fv&xlj˶0KԻ}"g7cYJXuo=VC4dxb+K;'<0飱&'!ҿ6*fAlT蹎 O ث 56%hŅ^fG󗋴q/Bw5( ?{7,wcq׳^okZѷͯ]ȕ*e9[0fc;G3fѠ142 ]f(m&P l

RAA*8k߅aqD*>q$[,7R(WDRQ̄W+$B:..룸BY@B$=(Q'I.)FV 2b %u}h)֍xN/r 6pT5Y}`F9E`)*ޓl:$'z+?rB-gymz6qޗuiھ+|޴O66 e3=1^=^\h6[W\?UZڨP}U}x@!W@FU 'SZzDdwd]h}tw݅Gd#ʔ1N4c |0Jo e錳XY#2MIiyx@40Aj"j+dq1VAH2-֪]w>9"@˟+sZ2 ڭb5 _ˋoNB&wb2 lJ${DNIHF+x6X5Ih!tm@NWR,u,Jhx='"܅hV͓x&>d͹7O2;0el"hQZ &飓?4EY25PeRSxK]]5 uABZ r_ >XIk^H2o6_mF'w9 _o (mD@?ck\U`r|8G@G> GC<0r1XۛqLiڇz_ς`Ѧ.1H Ff$B(sQ$%NpiE嘽Ϊd9W:,b }>&'ǵZu68;&7 ;c߄i5Jj<D!-qH@Px0TSH/r^D $|c;x@MQ}H̡d(QVцs^BANYc뮉ݡ~{yBT#|/]fT̹w32(NI'8AsJix5<M]Nf%VOIAԚB, q Ih5- Ѳv֪%w3o^䒈 RϏ4M^ow5v1 7 RYX*$b h5CCƁ ^Lc#IЀ'Ø*fVvÜ#HB/:=9.P&ssBZ%ER2謩fNfc%k4fDrT|(*q=AԍqMV~ɺsH[1sƹ4GJYM*mV.idN"/Im&RDYJhE9T.d鵤mNXmDC ^̪HNĸVE˜]-3fC<*aP׀ukƛl{g 7QKGzE(w4񗿿>헟_9_ #؋4QBDmaggZih*EoQ.6Pְ2B~rO/Ӡ]8ZL*v{q U;fN$յRXg-Jux}hI?&nW$bdVgg`IGg޳UR?%W2_Cqu5qfR_ _}:nt/}o]/N^J% >(Oٛ,rr7-μR4.$Sp^Ce'c`d. U<_.Bl_q`p]x-6ӯI׷[Ƴ1m n^ enNn3@QVF!՗7ggoJuS*0Ʒ7.|{UMMCo{#zK~tq{wzv*V5Lo ^RM9c+s ԰2KANb"[Y޻p_u⁶to ¨V_h^Ս%A[*p>Kg-^cS-7IjްH?=_4. >ĺ aѬg>N(شr q5Y=%56 IW蕱 l /AJg8%](L ھw:(cdڃDYd_=@)1ioYy. gq̒BJG~cvh=uŊjlb;4=?†LR]{2|̣9O%a=_*w7`8a2WSIɰɸ+`҂Cwy)ž<}{j.SZ2dqŒNڲi$^3ah ZndVi΢:M:;"*n_S{k&F?;p>m߸8vd@d:Z+V_?Q(0E(9Qy\MJcZ[pG7pĘӚ/YrdrD*Su_P@j֫-u\og7_^:RF6M#dSl f~ga!>6{(*.XKϯ]xu.>!l}$`z)ܦ={%jtYG3ȮhVw=M܀ זNW.xu~>,uk˓b=9@NB痭nӚ+۩Ӳ|5ӥ\\~ nY _N}9{Pjx Fk~Q,=_$yn.ؒxԍAyn84^>~/--mu>HЕ bdE}lY>;/t:ii*S^ϰRetZx/`IH1ĄlVC6'.i/E|:W$0fPA6+wP+vj ֶTʋe/SFh3o@ g3ޠfA` hpfpyFpUV3xypU5vpE\ZGzpe8C-LWgW]W[%WK-Gzpe9\XW.2v.pU:չt / >aⱫE2p<. W8ա[pFpE +q>pU<*UR^!\ B3+b=* ~.pUE>t*\JWZ5Mͼ%}24/},e8k}Z{\˽9faE4c6?czoDAmon]3֐"< Ȃ[T6\j#c:O>Bb^y3iso?9θ֟a@Bໞϛx; &i<Ego$RZν? t=!70FPiK?%HAx ĕ3(X ^$('QBgWM.,yA uQ9MZo=Z"g9d <̠"Rrry5vv'sM붂|-H\k_2НFڄ9!.^n/y%|jEz3VܐesAzt$jYlU|it^ą`Pӥ#[Ȥbj2|Ԗ%"V֕j 2jp4;+5d4a&h41yV4Z$$C_#:UؤI(},!Bnk$S!1'te8 o.5D&ĢHٹlKyMkfSz F3'gX:f_ (Ni38K%$Q a/&ˍ P,Qc7)RځZ~[rB{R%4m+g='X,k°:t]s6IP-k%k Dq:מŐRko(c!`L0 2 lq܏ÊO4><`w0ǽm|ʤ̡EY֑5Ҋb@k#?-`RVW ₔ\H:/^gr:3r1+DIWsk Yx6{J3ȵTcg#Ll fUф[ZΛr~4ŗi56[&n='#~!XN+U1,d\f|Ʒd/B+G6d6M C{ei/5 1#68ZFio,^)Ie[>6]NJt>`*?~zt_zr\s 90N7vE_h_'m_+(vwUB׿72'4&q f/m^'sj^"pn?(k!Gڨ%D4jIι1d:y&6n7yC!W; ^5tV5ܔv. O;ub,~J7tMtlﮏM@61R5 < 3=B (\pqqsW$wBԇEX(Z:LH"rؑRn)l8҃x)4 AP./^$͝ :"(cm41 aHNɥdoS#rEf8:sȑgW U9RZ=SBF1W?%96)}rc" %$3r>zm\UEMfVyH Ibr"n$\ȳʊZ݊wsI>S]ʰjrr9aۛ׳>(馒oAw$2{9g-v|)=ci!nG9&XG xgL D5:fBȑ>n) -s*2*s{V/=)`$DzGf49)2Es.HFjQVV78d2 O>t5s{,l_۫y_,hr1T;FlMƙVhS`pL$y\Imz6.E猴.I,Z]Av tTzsTEDk8cJ#I"YiQg|`RIq Ʉ@Ar %2"Vcg7"}H՞pqt5Xg5)9Ue\G\& "y?) d P6.ژ<Fi2ix \ VCPW'UӴ_S{Fk3=Hh+tH+v^bGK#t#̏olGf D27gmpxc|6M.o?L>Oۣ^R- -2<䱼č/VX90GcM;NfP" ~ ڨlTsp^}VcaZqOW҇ˇOUlݧլ~i:Һr,]o'Ϳo{tv71 Tz=a2ߛlffs1 }7#}\9TƹwWm5lQ=6yt4^ȤOUd/.7_-wkP^Y$[Y2n!) 9Q+.S7QklG`)QHB ubkcdvqbd%.K["%N )ٍxNv_2M*k >C}`F9tJRTk$k;&Zt4g3޶Ʊq>st ߕ0߷]݊/cA= iܷY12*2nZ7Wn6jE^KhcBY^1lh.S^% Z^JL9N:A3& 2:,YdByݮH&(3\-`%Er7)2!V6dg-wu@-?bwt~[v)GtO6 _Փs.2^.C9pgU"c,A) )he ^d{+4@Vrr8ŨSf B"P ϵ@1 Pk5vv+ƴ m~תdzhǚM/J6pT_&B_4˔YvQWL=.Һ-R~,HdSMӋE3{q?80^{co/?ur3],%,odyل{KE~X'Y)Tpȣ J-iуlGՔz`N萿d *8%97Kr1f \\/9IXL 7J1@Vsu9Y'>zK\myKx1tw O^=[YWzpo;B%6运i n↓n9 Z݅۟fKV6rY?mŻwVnl _>2W; 0ஷH]K oq3-^ЦƐ $+g49]2pЧ.85NET* 1OZ2W*v:GϮ|Z&4N[8x8?+ ^a|/d4= .,id IMn CCF12 Sy9.Rd묭͒,3q @}cB54kκs>h %gu:D;Xmo|=9m$|ɾ3t+5|瑠܅3:f&hLސ b1I@'8JȨ]2G,KN\-PL'խ{}#bdsY'nl5KzZQrWgMlaʒ~g;Dhm49{6~w6fm* Nt%i,fQW%!I0S*_+W6mέ+WLTH(f̍㨱MᖽN:B|~8-Nb-}a`kkIz!awjhu|4 ڑcvl9_v۷zKr}jԎgj5_NsU$d|}y~qQ[HmY̛/ )uFaEzzۭ VܻJV%=S֨vRZa[U5Ukڪ.16fE戦+A! [LaAcR;OkᬕlsqﶌtBĺe[lx6܁G:'٥C,u8WyhAUWWilL;6?9q0,PX_?/]<_~T(iID6zv*6}^iDph՟ϣʋV41~6r˱`o7;q*cYD l[e  08ࢸ4y=< ^Njo^1z ne`DoN\HjTMCoJ~zvu?g1և9^75+5IRMk[P<d~iJTlղ9g=i8jӊ(h#0-c)p~zgڡ0_zDH^ňW׆,ɖ[$? 
F}3fOn n5kb=aHt;I;U7ؑ_Na~9UwсÚb7.9i 26!`X|(I38%]D j@R0x/=cz|9&mܮ TY e˝z, ( !(PB S; SWoo$7^+6smsM=ԓ6[p~A eJhI\YgL%%'*$JI }{?}WB{#Rd]IBNңt9:X%Oҁhk& mA )NJ$8j2w< _FL1Ȑ=ׄR7cYwvƐ)t!>oq]/0_y$5s|9]wnM˷sñGGJq0t(7>>LQ ⯕aQr\8MKc:J|Xp c2OgdYdE \fTA%\\QnEIrkReI/1BH{:1kPy+L ߢByܼ34BCcrbM3"l|HCT#7N2T_|ۅ7-KXyd%B5J8ҫ&Gg/tq R5cJW_#qZ'29h@`O e錳+QGd"6s'=Dc gGwˑzX=dٞ곇% f "e<L`Dae^ ,Tfw3TwDt1"vuo?x; v]Iv*s8tcb:7sQIdu9uaD8n1.}t/'q ڶ "k~88-j 5 u51r (!@*ŅsHUsD0^*Ԕ-AT^JUƄU:g&{t?ǭgش{؁{{?gmbcQ=(Q'VlE1]Ŗ2b %ͺVl13 $2uM`tG$FQN:eKⅨDXaYwvoWm~ I..Kp:s}ndjnp48vx*΋Ë˲WѼI%Gc<FN*0FT1_yb> o +nG8PFFpB21d I|aR0*2AQ%7.@  K )e9qYIO[C)uf,4 κs;>[B :eb<pR _$'͇`} ?]au,R*ETRtju)J/8}o,E41;޹pF<3֞GGk\76cc=koG/{ \I.v8|H 4)m"%9%X6Y]]U]U]+5T YJ譶#]8p'w NAą3V*q HQd<(O!*4GGATX8QO384i@S .O $vK,m+Y4LJ^/1[.@H0 )Q` 47" U i%8b3ˀVig@cFTt4\"=)ZPR%hbz3d2OmWϞ,}ۗ}{:ӷl];&`>\WCͿK P u\V!9S5%[x!$!&On`IE %ш4@DRbR-b1T4N#DpvIk*4}Y c#8O묆#ac;{v̖֒%z6.,5̬ /=pwГy WuI:#YW6+BAguj]<.xj̞C{5k8‘ՂZݵw |q vx˧:7܅. VѲBzV|ЮZro<8mPk[o?mXroyH1:WȶWWbrY',i#ꠏ|X_9Yip鸩Grq :ԥlRh{L-G2uU%Jc3'2T1WqkdelHU?iS@UoUp'υ{y^/#RZX"TqOL3U"zNEeU]zAe^)(9!=>X$253<p<Hъ Kq &fL28חY-hA3?6J,bM'|o>eqIW>z8B?(T 0dʼnUp탐rSMN'ѱU)AKנ[BR/8JP:k$*qJs4HH/t_C.qa8K rnm. 9޷}*9&GB# ,ZA)j.,  %M`(1v`lG 5yѡIzc,SF%0xʧ@2$EFt)ӍS}3*n[LVqNw@N>F }AMBI\q5)`Ȼ8<89ʧ _|a`hik"f:M<<<9~<^r(x<p :tttu $BO./:i QФDQ h'@?A\FBlm UH\O QvCr3V_W .FU6MQGk-QRXW䷪; !pT Sr;tjt{񢕛ha{WP]G_P82 n=uO7хM'|Fh1(-ڳ-{,P&nЍyy3kd.#{ݱQĽi c͈H^C"җB F-@I"T3M(&&Os}4N'촢q>hBWRտs.pf*DEe+m"o2yQ9A^z}:fq~uү5nbF&Dwr>\qNۼ^x 7Ͻe(Y}#e!JB^rD8}Ϫ)YQW@*rJ1b5:Irt+O9G3OM]u8!#O0_ G{V>ՒX g0E͎ Z4i :ygI2@dQHШ*@…i㹳jǘ2D*e C/CcO?_qRˌWx&WSbx}>fk ~KC˶l߾w'PȅZQ&xϴt֐@B5!,)#fJ=<* NFAGW3Tni3&K7hOT)GKbfy҆ oP"~Q29@qD> ˏ5A!ql+b+#3kcQZ&`,P[q 1Rio%$dܓz{j #!%T{er;e&EwOidHeXZ9?*xJ(T $:!K·;Aݠn+^HYvųE:Ѽk4?۳_~wϿtopσs:5rj]jjSw (,-|w^V;~Eyfػ߯|7g>e]/ֵ[]5bqW4~hJFg+ h71ϟ6> 4Xጯ{F빊R \@rӨzc#2h5?UTGko8}wp8}}۷lx?*v#ߗFg+mF {vYM.vg%!(/0rxt=@YW^wTmp) ~?ZըZ( 5|kj&NjuۦepsDM+TI/PSӳNP4꣒'S=7V阺Y ,,/oko- Y~N>Yp:˭_ ~S&ntkC-ěĊ- g[FW p6lThP"C{:Z|UN۱ u66JE0.[\\FhvRH 3^ 0ED ID0)=U-.lmYRa)Zc4XײBA)J15ǒ(oE2/[Qﳣ2p*()e^ sAw/}ojؾo^ ȷR4Jnü(4)O- g9Δ[;:+G >(" w8ϖЪ"puraW6Y'Ѯ`*BYβ*cˆJg܏N`QYj{_=O1sHWet,]NȠ8e$ј}_ kaV<~W`R&,H^ <]yC58NbGq 眆[MDWghϣIG&m䢊3Rȭ3K*-11""ˈQ >@f3;7 #$ F¬זiVҰGevl!;_Y]>¯ŦeGqyi;LOo| ^rV7D9DL)kk8,!&p?f8Ҥ#6$ըYJke-e$ 0GM`Iw MXXu[MHryG]. V]um< 1]Szޏ\|G,{>Wߢ*]ևg!?wϏa\eCc#zF'Z -(b_iu F1z0`O81E34&bS+R3E9@6;˜l%^\9+Wmkk}a MJlJ{PC>P0vY4a#A2?9HysQ bnEaJ} S,/5ٹ}WXlȺyec(LϰP,;ɲŷ woʮo.zϽ9ВטˇLWMWg.l}:v~[aV.Yhlۋmka>Igxg gg0N#l.KM$7́aE`b![c4YnĚ>m{׬yy20 yjI9XB+ p!]6r'!r*lhE9DŽRn&x92J#"(㎀NkZ Hfn5}(O-}keV<Ԡ!Jnx1Ciph"ؚ>dpL6:t?mF `hn)1<@$4>n Fy0#! 
3K{4Ǩ#N8(QY v AXH)pfhp8%b"XZYh6i#1u6AÔf-wzw:jk2.'є|H_U ?\*('B!+@{ 9V\XɈ.JiHޞӪ6t/݂Cy5 VoɿBmŒa@M>ܹ▱'!>53tny$<+j'u~{`Yz3U:ctRP0.XSϘ8P=t`E NVF7s~{=IxZ aKI2W jyۮ ߚzei k"&jQu(Q.i9DbJ MyuV;X|{CY T4%v2J q-A8;$,H)VRKВr\wXGmw3ݎ]6R&;σNv:JPҮ$DL0 ˅FcGh Lc8\DDHcZ`x8`0$?M(B3ưƔ]N`)xRb`9.r)(8pNUKhѳ޶AR^w5b4?%q+vv@͑.Wk V_06Hc%-"zCAr% sI0ֲh0"O3} =+E< y\l.Ze,#Rcr$2aj$XeF\&' 3[iyn\7O7-GF÷7&QEr s#"ލN?QcEq+m401ol%ޣI/duѴX1gMڤԝ=ݬ$~}%4BE*#q nbp)" q*80QkI$X['^Q9Xs!l wUu6`*Aja:j˥bb6P< g$eޥpNP&a@Q!'Hr0EE sݒ5 4k|Sog>'Rbɣ^tVu_Du/ݎz{XQ5'1œ9Bۊuijz 鴎tN.Hx*q o d*AZ.S !-*5WHԒ h`ͮëRTOJ$Z\%-ۓ()n2-W|\RoTH jؒ2[SؼL ?^_뀬߈z٘]z1hw  Hhi֙ t|C_ptN?Q7IĎ5ޗTMk3 3.xt;#}Tz܆vBelA*eqa ri/e_hvY)YP(]Mʺ6?LĞ h&V83A°iR_?{^ʠ]W/ceskB&W\K`f];}}/#Ջ٥w4NC߷I?tz鐸Qəeo?fuZ]&ſgƣN-ܸB`O/ӂ4|`n5CEgy.4'x_,B:UyN߮at0G߾d:?fJ-Oaڅg?W& TOEZehآL>mx.yL}\q$Bv7]ι= 8ABڂeR!%+!)J"%jICFbKtM<]U]Tlh?vpN]wozwj p2>2"i7>Hz"+:hkT?-=ɴVmq5+qS= .B;^"MNi 4nQ!d\+f 3 -et-s,_Q",M%%vW C3I%67|.$yOZ*RBV {U[\Tn2.H2ôΰ1[e_# &$@,CsYm}3g?Ap/\]mͰ4'#Gbc5)#} >q8&40H)K@`!vAhu 6 :S>iH@(PSǡ8ㅐIĥմ-hY z5qL'pJE(4ROFgV>.ϼ{,7Q/~bX8wu< JH2k&m A1DgAK&C$kHD c"kZ;$\a5mf^i6-Y1pZΚ$sEVDr$_RBQm"BvBc:#1bB DzťYMn12-L2'ðO5TV [nHJ Fft(d4%ٓFf[HuFy" Lc.`AF4Ff!h!^*ar'(r}7(mv8aS~h#/4Ѱ<G'xwow4wo4PBo]̚6nv^yEl.mՎ0Хe)g|3MB9˺y[]U>3Qqg_wGA>bT4ֹTǓ쿋o'qfvvRa.o_c`.Q?𤛄7]a^^aLe.Ļ>voy[u\MpB1ʲ 7FYQeo8 rZao;U5V~ۇ )7}p`ZNuO>pZ?˭e ܢ3RI?עAFJfiηߜ~~f[ڜЗfkβ5J+t߯ o\ n(Hn?ڼmuVQ ]47rMSw.ipb[|۽t77^mo/345HW\8)Y50/^`e.%tǍm/@shJzel\+v 7NwI!SO3О1vOގY`#6&-4\zȌ(Ϲ@*h}0%݁R^EzUEz] bxxypXS앱S^N-ѫvS; ~4mhp+S dB̍u4L6>xLZߕ+zH\χ}m2*tרA LidKt9:P,OA@sXr0-3zR" Q 28'~ ڨ" ٨hMZW֓5@##a>ߒ ZBˍO\#bC\Y d:}?׫L{g[9zsiGϤ T׎>nWvmrvWrҨ<9"B]cї"+cl ZbiE/#q䬒\W-&5ϓ Q(R*I &Ft bUQRR2sÌ7`SAJ3ep)0VgMݧr^~%ϋyYXo#(2LD”v,:'¹Os{յi˙UlC!SLd"\c/eQ53eWKj-Xaq&N.6O6=/ǮEsH3-}Mlj`ڸy}[-VE5*H?.2(arP3dLS`AȨ1fd 8AO &I")Uak(~:XMR  Me,;,|t9YьWht[&0:.Ώ&_9b\2$! xb6 Id. %3P"g{2r l 0jnd۱B6U9̂68w# らԮ6;6Em[wZLD)P?Ze ,=&("4T&w^EF3To$5%idE4aML,(J^-|>p>fY :i*a5q֨` "V""l;D֖l6&$Iv'ԫ@;O9#KEK:*n=K9T}Vj6NIJy.8J H&9 0pjVnDĴkI \!CC84RRyviW-HZzs"f1@0ea3=*0V_Ynȝ1Y ֭XX!МPrVh!D`9d ;=3$dȡ"\M5]~WnWFH4-)z'I7sDGQԼ6d12ZyЦT[K|S٭pV]"F&8&&/JKyB7^ "}#cq%u# 9ͭ;N{uc #)2Íe[dAj8VRʍUOe}k&"6 gAZ Qqa5m< %WQXTQ$^k0K9CrgL@ B/v@<.t@'.?Ƙs(q&ɒnp 67wϏ|mNMS{1Zs>eŹTPʳ#5b@k,3iSIu Х.tAXG [TƬ%5Ƅ̽okjȵVn͒}y@բpB3i2=O}PYr=}>4;N|.S*fq:=,yCEIq,#a%Lڡ._x};>.>y&2uE!>}/&.#Z؟~,yg0qxo$񤝕 },J.ZTҶ2)33ը^wzݫN/-WJm"ɵGזWv`t]'ϻ寳@tm hkav?A67?c3k`6P.sz6N:ݧoJ,YWoP{S3{=8碞G_ǃro(4x> iG8 9j?uaI< ؜Okpڨ%־<Ւlfcrtrg3?Csҽk,W(\?XW5Y}]=EGipetçAE ?+*9!xLm`귩>ucwcpt>\\d_VA%WA2H yCfwkq 9P b7PwP.lWVoJwX)RZkb!?jpf+ %h/l6\Դp08 ʾPNVM>a);qXn?\[̔L*&Uu}ue,x4ŀ SFj Ϳ~2uΑR5aJBWIQd9X]Q"˽r2it 3>o˕茳D4El!3J\$crYDm@nwȓ3"L \f"[M5ޅdRCP%5A{ Ap2qoĹ{ϵiG߲jX)*7a,]hf^/DS0Q,ݲ]vVm#nO]WDi{IE۽h;m8(EpEs.l \ieH^ \#UnvE⒮-pUpUDW5+*1IZzWEJ;jJ95\ xv{t;XWgY{6+D70&nzZ mnu#K(q3KR/˴e9M.wgwgfgf8~P2_J܃]]mXsbWzUW®J;JP.:v+ӁLz2*J:k*AyǮBvE c m'}̸Hh /K%m:ClzL2vSʌzɽoZjgK埔3!pJ@1U>\F(~OuE-+cѽs\ǮLŞz|e,;18U c/hr^EǥqT/QDJWg]peGs 6]DQes(̰=B4J-R  :R")9IE$s2J̰4N+  $%}鱪4a*rO˕&ֵjdif1˜RO$fpj+OR96DPb? ]!G.2F^ -Qv)!6.2+_GfX{=;K!KHM )o e>^ b2 騀5–՜c*ue̥^ޜ7g3 cJ HQ zM?d cT3qwoޞo?''>/b<KI4a?-XvCD:NC1.mNaWqKfF~~rU (e"-R,LQΪ[h~" j1\-]cYگ3T-~GPC~8~pE5kE^Na#Ϙ` ͜:c ޿;NŪ 6  /_\cw\(捿vq9nlR*8ZTHo>|~n~gplٴ9~ ?&XgcE;vM|eesv`D8. r #^u*^ч4Gԯ20@cBsЇ0\䋳^Dʽt[7 [d(?/@$=I5O[y# LlzkˋYxQ˻q8*wfTma6S(\8]>|l1_asӭ+' X^9fVh9xH0'&q vst2r(1;)@t:ogh Š&{tyuؐffpԭ6Yn\A3L mB wEH(e1f4QckL* T5ZA6NA{hKTKk&H /QBj563EE`&B$֩ס%PjBіXFpbȁ4q,;1W \QMMzIh/U][-{&[H ^_P隚fl_ cC%d?jJ$L6"+m3/y_ᄜMeKқp>䗟h D[&N[kpP_KM]ש<R$`ɋa?LWT֫GyU1OGq{7u O 6(rəL$HަlˊZvRt3ڀ gc`bҹVs:F j+n~ta:8 mew{~. 
MLrJ͙"<%E61ٻM'(uxAΕ-mQځp:i- Df ظa9Pz6mm@[U}zfo ɐ!,g@ db9$U@+D""@vVoY5OHW>%rhXZHMIȏyl-kghXp,zd}ehiSo|mA8 3 5WHp -%&DoAs GG䡄#y0!3p4E[cԑ 'aX"r坵qAYI88~ _PbɚiX9k4ՙ7>h Wӊ&O>\Vh/ s/ r逴2q 4UDb$jZC(4lJb_U3%)vI˹AoI)gO& \W.A>.@PbkL'evun;LKvJNԈX`ډyq]Z-Om&&9hr ]Oq(8P=h:FP"iZ+Io+ƍɑ{Q; ~E}yؐ\(< l풄 l݇ ˖Eu`u_ꦭ`x9hpO|qqڢ{ב :pôŴF~7cgT[O\  `V1ϝg(_ il'|>f{-(Q.>d˦Zkl!,O, ?yl{e Mf_o>2q"edILI0 2 · B]>5hK k #l B!D1 m2Rʽ/<"WWPQZd<)VN"0+CʘtUfW塴OWƩN߳ ɴRc/%9:5+ />Vc7, @h@sBFG@r#N"`-8: J?8N8B;1G` E<`T#`I{J!{]RPqεז7s?&-T$5wS{4@ozqyGz$aEN^o!Ha;I1z1BZFD:'gc=<_Qȃb h0KI 88FbuQP 38O+'ޟ^ZX 8|d ITƠ1hBር1vOXo\J,9&֨~=!XE]WhCpH]@K(j[ٻidKRL<ph xC1Lp6mߟoԵ}Z\n :/ɽxܯ{﹫uU~kyvf>=}9PVF7)} ۑzUȦ5Jg"dITLQ/ ^OU(fpuc7;zz[In~Xݬa,16Q~cV*v@UAj.\\Itf*cSQosF( mj0ͫOeWf u-;30?ON>yzsmrB&W\KNBz 'n4s?~W$ꟿgTqp|zW^$r05e/Iq ܤv^&Wo^ND˘*9r17{7zw&y yut2N׋~(P!=9}G?Qj񶔗O=مGGyU6wf/zv-U8V݇翍߾~ҕU1fzLUKGUwʓ&oёjl7@K{dŢv%sW/} m ` Ŏa 5-LَAE?[ |9CAqGmv7)ƽO57Y$77"hX= 3h#޺$"}su@S{A=`Žq][΂5!ц3mjZ̖QE~3itwtw`H%QJ#dRkɅ'Ѧ/)Im!HjyԎ/$L/U!`i=k߬ևN)AK73KRUZ:IKf8 2E:m^po @&GA2긢V*f`/@ 0.+!kռnuQP 6SgL2v5*,՚@vXH$ LCy\)ڴ l  ) mPk8. åpE0@L )d4&꤬Gjw~A1]Ec+1@H82@j<DpƨfӀqGc{Dca~O%ZRim {k)ƿ&i+Yo1uG2E@?(Jp*aC(ˍr*gIpY8nűr2('#ts2>Qd΂h)!1 YxA#+eDBV"sL(Qm@`TTR^EX+\$֖wRHDcۗݭsd :_qn-Tegz h]C_>:DLŨnr+K*}X\NLEJn(wY;i0X`^L0`g_&APO[iYrKr;AKJ춞-Qbk(!J 3ҔG-XF#JjLI3$lgďW /N1+9Uü{^yqX3T_!mLX =aFi2iޝ9/CP7̇>| ےN-Z͑[v5xaUAp}㑢6B78p#hA.ݖ*|EJ(!Dlr3Md݉Z}Yb%뱴Z2ÜL '`G!wxdkcC_β`vU܎}nſk[JXOŅލgF-f9FEvX9n %6(Ƞ omTD٨h:&Ȥ#=\?&{_Wnv;T Hi:*fPEp'Jw+sN뺾FG㏣5B? ;o - U8(قP) kJ[Ԋ&;jzd28J$YJ?cc\s4*Ma< w)LS'Uo|X;n M1ǓOlٳ~7{ ܼ'Մ`~ݷOC]H H3> )ޥjg*^ZL $M0x~Up:/z_/[O1pou#/bcw3/L7,[xWT#WZ-jΙNFӚ|/7+n'~v,' 3^˓b>w=0m4V 4`:|-W yoJ B0$u΄{EeVYP˳[gAb%2GLL1u"kЎHY`&yԖ YŜ`Dt.۰m Ύ&ޅf2E>!k_2ivV&GJ@_q* l2Fo^e{ˤ4$sXhvnqT]m a/h|B[6:tOE]^f] ?BeeM8+m&%ܗYUZs310!t~!vF5_>xb2|'[l{u$%2ٔbϥ׃ZشtChpYp$XQK'+f E|g%U ^^b=EbdH<&~,ZW~ BX&aI=w31DpPN*YLYrS[F#eQM`&'GfĹ418G:+Ǔxm ~|ށmKsgq(6_L2j/t c.М開.ld5 BJ.~Mŕ o<W^)[u꼋ﴋԽ9 }EfA[LsQE珯=~ݵ&~!>oŚ9}?4>d)(R Jpq &VZs~(a| ko?|:OΫ2ZYFEQ嫳?j׻M@ -ͩ.XTYUҐ`VԊߪynz2Hp-L0CG*y p-,]~aΕZM?9wzHM>fm+iB. cPkj0i_<p!)jWҮH5Dtۥ4j'( Dt]+DZY"Jz:A:3tp 팺B\=]}5twz}N~p99.]ViP+]鞮zp!BFuU]+RvBLtutŘ%K զ+thm PҶEEt"t9Kt+k}/V|POWCWJ9.OH)Uf%ƶk8Σ[XDAd%U1Zs3?G: f/HeSهכK#HhZpE M\fmWh NӀRN%63tpa]+Dx Q >ER k;DWXW]!\ٙgvV F+!=]]i!QM=v4~hu4:E2Bk%u N¥QWS++)QL;FJBWWw"2;v9'+yG{pCˎͿJNEWf2=]T*ՂgW{Rw ]!Z#NWҪNSPB\tl=]JJz:ER^vl0aIԍZ& p&y;„oK[(XG܀-zx:ꄫޗp4+?EyEesxhPU~$ (󪘾_:2;o얉9 R %:&7$3f3ݓR˽fRr})Uu1Xr L4/e0- 4 LVY1mi2'ae""[zmd>u2Oeg!uɲmԒW+'0` s2Y&|G))u\lXw\lmW\lD+m]lDIb-0:Drjɔ]p}Q*c{T[pڍ4\ũPD~Fmг􋰴!BJt ]I-%m+DXOW'HW#RT Mg VJNOWCW@kvI]`Nig 2BJNRtI]`źDrBҴm]`Klg =vR]o }~+k^~/~(Y˞]=tuhS+]`ƻCWWղt(-҆L ]!\NBWVT=]]}T_\q{;X))2k LTV-Gqp5/Xx?87 vQ`soAՅ]B˪ďR mE q8tWpsBG,Z˜)p{ ,YAGM,n6Luڵ>VʦaMaBE%S*DʒYS.hxёkP]$׾Ț,JRyx/dp G[ѡ "3~Y% |ReҚt!dHXlOs\,ֆQd,(Us,ՙ6ݙDA*Q2v[T" STBfGFৌ9HSl RvES%}##+$]+RcPK\0S +lʈjH)B"P+%BB}Eल@4*,IZ'͋Y= v U"\ J{+kqRscԈ%gaEFV@jV YRBvX$(xu\]8Ɋa I%CLxe@DŽl D)%[0]YED' sVkO=0*eqx(&5$JRa[56Yn< ֢e ߪBӘpd442TfG28eOe]|`N (1. 㐔Yl\Sm |Udq\9 cU:B5DŽ BO[՛\ k UR%!bb$ ʐTl0`A Fk=I2L), {"dU' Mrʴ³f@Ld2*i.6<`dBnTz%'F0`\AS@y4b(VH(,`@&9PIJD y\UoMIF/@Qa᳈#vR;‡p@]0 CKp`R0 3Cغ(ƅ586Ud&/6R{ u&#%RI9nTXI5K!xX)(`~E7.PSJ NicW6!;'h2ZêIQ@IFP˒6&d0 Kŧ U* RhH 2BDjJa#`V6@۝@VAr`W_&M2:1n/wb] 140_c!*S(NNR!&_0! vLr-żM\Tm9w :/mZ$l>^"xu@Kq@82(}tEr+ ]-sc007n8AyE n\X@hRaAZ=("4D iLȼ*@&i. 
K{4mh{ ȋE%YZ{[x3 nEf6C‚չI,TGWgJ(̶Mr^V2Hlaª`~b;<17K*-VX(;Kb f| R;IyH!p9X.p /&]MDha2y(%'&b%saGY x %@3&;N@cRm>I1H;jIVWt5vS8v]qamLg 0ÑȢ(M621w`DdbpJ> ,0" sE*B@vt"֔ КK 0]V;j, DI+fc޲PRb*M{Ƣ t2ߤtABS4ye\M{-[ `=2\شMi\X}>ty3De\ ǴjYnbm$6m5"qPqt3(D4Q\n"{sGAŮcզ@15 UoWȊD$.2UxrQpŎ9{?M^2`\SL Rep!5B<: 0YY:@)ErO@ATf>QNڙZgxn4H5i*E H>{J{fR&C.` ZSY[B):9YekA\|5oA ICx ڀi#B &u0faFX&͠&0E zQBb4S"p#&D9a8AN=S{(0qV@Ō$pװ,LCqaIm6rLY#VJۥ>4`eSFvL!j6R>M`$ 1DN PoH҈5.ΛX~ZjOW4,[V(w(j-7V})6a^rdyॕ"~UuTjy r>Dj>[DKjωv?}s/0{H/r\E9.uI~I'SÜr;߾=lH;mw;9f/V-t^lNOr'Yh>m]++\]nbδ]lOwUx`Юts7_lgWq1wk]ngJ gbګf~PtgJGtUX;%u)5}9@/zw^ ;E%Im!2u.~3ƊszWnnyEYn~ kTw:HuĻ/Sx1ɻϔ_o 7a&ބ՛zVoMX 7a&ބ՛zVoMX 7a&ބ՛zVoMX 7a&ބ՛zVoMX 7a&ބ՛zVoMX 7a&ބ՛zW܄q`_J? ޻VU*Kwx:w%t<ϛcþ`0p{R@v}O*Um9b`PU!jCp콂_Vs[*>A +X}*8t]+nb)x-=/^A|T9ZԊ\'hiw3{=ߞ.ރ9|ktjU>S,[qw'S+9ǧ{X|[|}7Z^5_͗8ķʛ9Lp_xl/8v$lyXܻ?my^].xS:n^l!0^#s+.|24s/KdG~7?._.WߪǏduL5Ļvv|!*6{O\O/Λ7wu:η-OxVWox7'9tSl7G%4y.1Yޅ]6_/h志bk ]oW/HZ7!| ݋;9ڨ"jiJ opڍ/[C\|%)/g)}_c_QA OvTzq 5kOzHDv@ҫCbfWJS;ɻh3 n " FHϧSg!)5vT<1޼vDz>,9(8qL3ɤJ슰QK0 QP;L#i便Xw=|Gt(ޣ7}oORW׌h=w{[Neb^-/qlNӝO4=?]ުii[Ɓru"?H8Rwݹtg/hQD{-3~G\ٓ( `tzo2ۇa@}Pԇa@}Pԇa@}Pԇa@}Pԇa@}Pԇa@}Pԇa@}Pԇa@}Pԇa@}Pԇa@}Pԇa@}Pԇa@_0 oEu[i~9ׄ/sz;A"_c:wrmbep|(Xz'hiW[46jHAG*E;J۴+yK?KcQKd]%c;vŵ:[QN[R4lc#ޒEBnҖ)Hڄ-퓉xK.ZS~䛷Qk kmo t9d=ϛ1[[G;kٝO_:1I-Vj lOHY_$N2Ƌ*WWYMStLēߠÓY9s#L @}:Gozdd{ƭʢ_N7k$ CӢIPaj%U+qwZ+ʗF8\9!y^B )(^_4Y*mb7%X<(HEbH,"X$bX,EbH,"X$bX,EbH,"X$bX,EbH,"X$bX,EbH,"X$bX,EbH,"X$/BœfO"^-b!a ţgZbX,D YHF:gn"O%ly8}fϼ=ϡ~pFt@_ (L"=0VHb gW"C/b79.պlQnD}u?/V#hu7>.P]EὫEoЃ Zla*oCo+fXA9|s˾ 3r +.nSdɕoWamC_`x5Xyk{%yMPmm{ /)owVnZr^V6as(*a:a:a:a:a:a:a:a:a:a:a:a:a:a:a:a:a:a:a:a:a:a:a\tt+ʖ r}v]M Cn+kB|V t!ѺO"e@ġzq "%BĕD|z|U(2}u8g?̪"'R>mH@Cu0ែ }ׂVoƗ~x /1we<3x5TG7#R"F֚nkߝUg{U)b~7&mlx^M6G펡g%wG!1p~ 3Vtp7~2/);΅g 䵢7׀V; v [Z:Psis‰̙8WFsJI Z:?f {W\v՞CC:Uҽq' } m [E^s$$t!vqޡYuk06{׏IAdwS܇q}SͮCb]-wa]/'gufϻoUOL+U KF{˵'H`' $0WI* GQEM+ \+l_k`I('3 [VE.ϊS-qՍ6|3%SdAX!_tm(xkr9jMQHYuoY^aWn>WJ`\Vh,d9&Z8`&Rڑx3ZHs$(hgw3YEzg4ꝝUgG˱͟8G^.hzvQ[2ݓϯvce]oCQAn-Y7YG*O.m\ɍU+Ob򑎊c[H/O](8aҡ2,d?mN#n_tlL]}Wlu"Rʼ>g²܀Ө#$5mO: >X;LY^J~twD%\jQΛ"??Ʋy4Bc"I%aN.([E^ŵQ""pFY-]x$yϠJ;,ɲIM&0JL;7aiWXj٧0^sJrO1.7q8˟O^JB10<iѲ1'/}ޡ\NlrK?,˭ԎM؋/^ib]<YM㡝㋚uWfb} Xg#)VDB?럯^ awYOU{>}Ҥbt('` ."\_/؟W1vuXzkJ9O7_a8~n1t\yW^9i$"*7z:fc!޾y$'|/$z0zlx!'X3ؤ;nTdf9#W1uu?|~ ?2:XZǷy%2Q 621mF 0M/5ȊwGGG-_7 0ٻӱ>\ r8kĪ#rW-&ţz{]W,-j 4U?>[lxXou/.'g̓Vj ƺ4דZ֥}/j elKTGw/ڳӡ j)U:kZO}61[6F״⻺}9O/o4*&o/\a嬼w>NN`/|l;>.nm mMvd ̿pM3Y(SpSH9ᖰ$EXƴ"Rsly +8@Wc: b_ɾㅒ5!00 L1 B#!\+)'!B O.G8}[]n{ٜX:@;q,;1ܸ) ^wݙ?ukGfzO4:zQѫNR"9ו9?AXȕ2g xsS0$b B}L71?cfceL X4.^(0'F *a *, (:ՓޡYuvwq="҅;3xah3b+xEx 䭛cҎH;1޴A9\4?@93 5kJtDy]R>E9"VȽ)G [n,A^Ƶ!BK 1,Ft{ 1rr.v1-%`μ1,3vlW@&g ޷ 籍c;jJm fe/[)G~4ފB:wEִGJ€*oLTTSv>iMe4I%ap_Βdo}@1Mf(}?l*ۿ~1s c2Sw^v`3TnQy 1n0#Fx nrsb {al/2ZcmQX8ꢓ (ǜ5D:JIA?.1ngpq}sy0Uؖg_ ʮ @e^|u}e_C'qmy;8 -7ڱߜqs)}OZfd(CAT[K;a\{ ̦@=UhxM-v;I{U/>O$;TcdRTrZ qL-hR^gG#TΛlPmbG!:BVct}{fP q4|h0:Uk9[sI [{k&`3ˠ%S\hPsl )'bo 8X꼧I"E"ĹJɝC '4N!scX„C@SpR7)(( sNermĵe`I`o#k!}_&ovcZn$۵(Fk g'+xH@<,"04{euVP m !g &Ӎ'g.Vѓ$&y$rxm0RF H[8b Q03Oumqޟx$tCp>xT$@C>pnq!uO$XmRi LsL\ZG4^]vдϰ߳ t Cky運Ap(v[\vv=T)juY7+p4|j;|Ym`׬ H|З>Mȣ73k sŧIͳI_rp nl X(&m-(Y#~_&.sêf3SXwfA3zeE)7ѫxoY=RXxh's9D_._O iϲonBmrУMҹDB{:Nv؟}>?3ʣ =zWN=gm+l^c;]`ަ٧;4ϲ_Fhd_&ׯ>zӇ/Bsyo/L~OuA_G+,.]}y4#gy-ug=)^WR}1GXv~hь<'̪wf.{ ?Q;b3UK[ٌon?nU 2YSREHf+G˛'m_%M#XgѷP_BxD/`X4VS]S^"pԤ/A;wdcJ%\ƾ]P͗ 6<쯥 07`Q$s젗`AM{懿%3Hv[MQ9*i"*hMU;ݞ,&Jtq|z<ȖQL7o';6U;zguo-왩`Vxj @:If·6֣66͑&{j-_k[{ {$ɠa:pGӁ`ԝOY61|n~9bʏ1=HhI7\hlCLg=(q瓚Ѥf$G1bsp+&RrV9tqD(`Zt:+"BJt T]JmUhOg])!E8pY%x'SA tVggrp>Ad|Wۏ~B"]pY׎[@h!gE{t\dDLR&GD|wP30s:p-"8/e/"qTZo7lL2[LÇ8s EՕki.ݞYΘ6c팢 ɀ(ęBulcđAr"Įu&q"~:X8X֧jɶ\sQ\Pg\xpvYh #Rq20a:{1 ..vV;阇[w> ù-V0; 7 
x?*+e)3^C 9NJ SH2"?~|}C>~(xdz$_lߐ4?Mz}ݷOS ,j[k1b2$*{`[v֚7A_z/ ^-d9#C-]-tyM]tx/L^e˫hxneejpi*]ǣon3oZ^=ઠΎ/g|on |1m7_^:U|t88\L7N>"߷.zp* vS Ha}zpƙW$jpErWV0w\J\!,JɊp5g0HrWVT \9Մ+ \\UuEj;H0zp%ٗdh3wscjCFWub'L`+\IZ\;jV"\`+5/O"\!0"\`걮HZpEjUqE*C \IZ4Zu%%W$WZpEj;HWʘb~:r-̟0u1Y:ČZʶ:F~M}!3z?4Κ]gͧ_ͧ󃧤M?O c&|e :zn %rͻ^{J,Nl*xo.ϛ_I<1:Ҡ B3qK|r8&;mMDtk^fOeثt _+6xo_C63߀" BF TfP[X*eU2PD&s( D暠kPLJ ;r9kWQ6fw?w\Y HuE*-pyP`z(W0V H-w}b k= 7U1Br ڸ{7H}pLPZU Qʁ{Ii"\HƨDT 3jq07 ( V Z\Z=Zn& +QϔQ Hm oWW(rW$WT3wEjnF*0wr+^ɕ\Z QR¢Jw?w` frQlfoTwWbնEϭ6ZV+l T+d-B(=WRW{+IpjpErE5[WoW/+M6KlYF)k}wF|.mbZq TFİ OzlRȖ [-mT8kٟeY:&;_\U!6ێEANTfF 9udFܻut;=k}4LZ}4TfbZ9+T+ $WTcUZ7rqeo;^ P.\Z}0jqe(]PȺ"/pEj%;Zn1Z]PXW$wK6S4uJ~aC{HR~#\fjpJݷ-z /eE"U+`uLWW{+ 6BW++u-"ﱙJ5j/qB^(X?\LG ?FהXY1E/] {6< UBǺ5 'ܮ8Ojнxi2=:=0γƏ3:Y:}<\oo߾ź7LO?n\HFyN7Zـg > :}.|}Öq(+Ѫ..*j8*7Wax/|w7t% ;y/C]?ē.Ԧݦ6?2b}8wBŵە-BO+Z:Y }5 @I<:BxuVX+lZtj! A&A€*6LIphz+3hz_BLbk.L 쓁4/=Rdmͱ9r>eҦ]zm8b,ςQ̂º* J;)^|9Ƌwm_!ξ4mO4 !0iDTQN,zآ$;-' &ݙg LS۩;y-Xhm `3hQ9b0XJB!ӱwFv ^kg}r<gX{V3˗`MPG`a<j<_Ɇg'EUw~eێ[NZ+l%܀+zd0Ȓ" {dYe{_D :wJsIra<7Gk2#9"GB9DĄǔ9#@v;cdv NB*Caja(k5f,`.#iM WZ":;;#gGX/5fӫxf &p7~PbqѬ^W}U1 y"; zu x2+YkDn$a'&BNW]L6^Ρuu^=Lf$3ͳYv-shܺ{Fϋ;Cvyf~H௽fwy@ۙ~E<ηt\ch':mټ~t뻅6jR?=v^i/!?6DŽP/l9gc]RX}?XVW>8X@e߅;`v]JTqn(e׎Nr_c&'91d32}Wh| 4Kxm)_·k^j1kF[5KSUt{{L,c05 l+&9NxQ >^I9;8*YgȲɠ!CA4DHGd"dVC@2Q_0$ir.j'iۍ(cZ:P3AlnA9E(u{^`Tr ۫[É!-Nq)->;5 $NHYe4"RXBI J*$%ҌNrE؄ A2 ǨU1kQRk *zqyqXCgkӃl` =~iL \ |U-ȶ!63[w7J,*sR}>9eO%ꥻtb1DV1F: le M )Ocb V: X0묀'9KrKoC6=g(AU~@8ʲէaK-^ۖZQőhgCz](CAܟaQ Cn_Kf[n[r-m+177z KS^seK],vamIcl,rmP&#3ŃX[NJ/*sHH ,g ( 5 'JF+D"":uy`rv1g,9/I 6sti׌JG}}u[Gd .%ߛu(O^?Lv`+$8 #~ 8_ܷqD8BEeag(245qp:rD KQYbB .(+ ’E*LkgѣL#S /3Qb #l B!Dج1 m2Rʽ/<"W}Ŵ}TL+7C򊪻yEfgcUIiؖS^mk>{w4" `\E ˅FcGh Lc8\D(_kh3LW  5 E<`TؘJb ,Sd,GE.%ΩL;93Yo{ ^3O E%P%w32>Kݵ۫Sa)U<+$m$Hc%-"zCAr% sI0ֲh0"+nx:;iU=4B,\ EeY*xL0@"FUiD29g!kN4O2׍&?vثkoo(M4帍!F"GlEЍN("޸S/f1w& ^ЗºjZGC2u+kd7 l HNdGRJ< pfpjqJ( 襈N6'ĩsDq)'`mZ{E9`yKi5*ntl8~9Efr|W[$*[lR٠9/<ȧO8:'ʪ$?e] 1W'N)0gЌ:\!b6Lᓥr+k>\x|\w{ R [wl`YKT%rjCOmJ?Z"bBe2pqX$=^2?.)("%Մ? ƠHЦۣW/ݏi?](aZ&1_O߼ /Yi˕]<޾}G)1`Gۜ!drhȥ$ՋGgc7*|}q|a&YߓꟿvT7lGĵ\8/MdNf|='Yp1]TUh.>IͮR˯3~׿<4y.&#'~4՜%Z~x%}-\LRץT=_Mx.LW{:,]jwe9Y`qgrjCp$halcƟ|}ƽ1fz ln]Z WMLjm L r;5[&n絷tmn[bU]$>4Wjen}9KބDD=tWgO7o\Rd*  NC]Jh: NcMm ̒n1 ZaWqity$kӾ [e5%l:*+X`;0)q흣Qa V{@QjBo9؊a%^ y}B{moX^HF2a=C~)p<,B7Ol>Zf=[k]'rgDlYe,lZe6,Gp\^e+wm*Q3dΦƅx9̛}IVrGuꃍj{]7,ŋ-j,0r%kfZT|~9;/XFykr1{' ' iq&Yg9]^b1 $t+_|SR:Z>r;1[6F׼k b^xr(1Y)` Ջ7:6*^V": BOj5:1̭oHaJh+JN#,jEB@ Ye8&ȄHXPl=>oGGcuHΎ&>QBj5/{WFJ_nR|320dH/._[e[d[qc[ESd: G"㠕wLz%:QO*ԩb:Uݽ۪jĶJof 2V!C/2Dt2 bn(p\=eo:_Z\@EЧ姄ߺotzpwhMiE(2S0RKt=oKRa8iN7uu\~^"@ l-p"hu d_bO**xUtV ݻИ}.ґ.?`LgDhmej[9~.eH7@,crIYxZĜkԕT#BwKv#'u¹c翨WsΧqqjL{H>zjB^yK1ܙP晊̿D?<1'm QQloy{?|+ӏh|pwߏа?\n{Կ?ݤUtU$%% E}-M3lg͜yG9Q͔|M, }.2 z𭾷]v[\meTlTz~|m*V?^G9{lfW\%~r<{/fr'{PsgWr|nimhmLj&׬G" 6r"*k#B=WrHJ@Yԑ9ɌtIJN!hJ>9EEU.[YJ&oEdQ4NScp@ :g(@,D(O'c\L 7G.%1oʔuw_EZq&J\P0$(j,\$yY 5PrTE/lYo͇9n2vdlv|SMWvɶ).DGR\ /7M֊6=uq>rmvIMLW$SzxP/E *ycpg5Qj8Ӱ IZ/$e$j\Ҕ"oAx&"*#Z7֗8e=kԜ3AV R6!HFbGr\\R-Xz,{yՋ̌WHj/6(Nn&b0Y  F׃7*#N%ZM',ET %8I>P!IMj!;{60C'7&ʣb&ə2D02Wl)q# Â.ۢ)ڦG>z2 8p#@u`ќ(BADѿsœܗT;-UN 3^Q!քHd179u IѩxXLx8cKDf .#⣙g#(ou:qw1XQPb\ '+ Gd-FSBI*ʃ2θ༆(G=z I3Km)&zDqdV|KkŴd[\q9$%cx# b` QCQ#LKdTD=.Ŵc[d煒d^Q['h޸ud<&uIENu(:铩:DNX8d\$gV7S '? 
x=I&XC/ B7 bԚ;  ha"\L 6ifrLռmo&9 jᵈY_FFwQ"MCD1 < tR5بaBh!:nSTB+D˴\9nVYk CK "}$V(prˉ9T.sncb iM&9K"P, {N)R*D&qk(GD5Xd*k$PvJȚ[+p2\~E3sѪsiENfN)jm'“W0&me[AxDha kL8.Dx@ y,h5dk&ِwEj4} t&~rCg~ROx,ޛ~Pe>=_ PN-g.$#!^3A: :Nf;Ä3_d U!N3هZ6r(xΒj} Yos5o9Odcֱ)٪bwoZ?/ԳA,8jZYu3]_wUTp^w J88>kKLZ h JP\Xd迒P%Z3+a`E._7`+ݜ&Y^’s4.>Ufi{e~]9{,6 |(s9$ߡZ5  4q0;c$:'-HBd&|j;Q”eq5˧XݦD"2 XrSVx_!ClBôh~зcay8Iβ/+ mD"7[+/@V ׃s6Wz :dm7o2(\nn+o! >~- 5(&DQR"I*lʆ:^2Dv.Yu(Yub՟ou!/yI悊D:ɂie1sE%$AbTlTI@uNex E"XXy+`$SuY`8. $ڼL W?p#a?y!G:6k vկ 8&_K`n HLW_ &TzL,.Rz$KUKd)/=KHffW(0`*`Z,],=\BRIyHp+r8p=v7`zzp ^DuJr(pդp4WW&ǒ\e*WY\IkWYJշW1F }qzT4\=I\חI_Ӥ[pO+jۡ8< kz0p5P tpJr-n>zj}2f% وPFy16Fmf4LH+_Kjs\gum,:oI)~Tjcn鞫qM9΁@Sl女6:IJ 5Qqߟn|w,Tn$ <3,Lg5~ߛ.ckޱI4(iq~c|'y>hr'`!镜1 RkH_w՗4X28!Lm#E >lqCkwDGYP҂nv:;UٍTp]uZ=|[qv($k0/ &/r߸(I/4\ M5՞׼ ([H9f%]N 0ƹh/hϫ->F.TG'h@fNMmˬ7M{6mi7(ۑ[cZH/Lp4=7y{,PJ7pޓ EԠլ܎ѷ'fx)+@NR7G]=,넛<:ap`P $zW %uH` >a nfm[ gʗpʾ# );BJLv}i]zO%*&kK@vQ%& g Cm+my.u |!U @24_[ KUJ4mf96yD!9ls(ij)iھK;K)b|CL29\dĕY/v zp@R>8p!K94^M09 (+%cQY c'viiae~]TG~˯+b[Λf8{Tg4?LϦGA}̏Bv3GxQ \?;/fѢ4#{/~ i㰾wj/[F<9MmnDTtQk]KJl ϱ\4 %jNI~8wWf##PR_(/J %BI}o7<<1(&j/J %BI}PR_(/2|ՀҦ8_] S1DE$`Sy%B\Dv?DnYFd=٭d)OdbZ[y+Bw^K? 6ֈ (#DUQ"ˀ9q]6 ,h5T"BZ)LdȶM-5LC?}2MmN/%y;Y-Nyn2էRgBs),CqT9ㅛ8ë Hhw19h}.*Uw}Eo}ח]gJ82x_6X} 7U\hx{W`Ԛ"&$A$%GqUBS2bXSvh5Z`@PAs_lrD`p'FLYwvsF=qw͔xd>d۱e (A\#GB3vexI8[KPK".N7K-Z]3~W! SNO]xMdz(H8 S\f!i,R ts3D]]fDb)%=gX&ru@ms{|Y#౏Dbcg I( hTT^yt{+C0!r eZhZ Dˈ)@yAF䗖κ'bds@ˣ߲YvAn#3Mvr]NzjEj-L+p~ŷ򦘅X_ѝ9lflAd8xx/v"?$]|exr>OUwt쵲2aDD&;LjnDh:]fI)Pi^7d٦ }(%V==i{W?(cZ:P3AlnA9E(uC)c>g{SLѡ\kvσ{sBH?:{XD k#R:)U€aJ:I,hihHXG"\JlB Oc*5(K)µhAkY焝ugO0GsTNcRԓ@ªyկ;K}z5zUb S59BGEtH9!8)u\V)4FƐ:>ztڪq W^)ǝ6;]\.f9z X9%`*FQjzf9MWloÞii=?CX͉L<:AƖ3 ȍV Y<itW:Q,nZ3hmS!hSTiTijR~nԼ>v9(.:곲('Ov.(@\!r -%&]#~ ^ tX%<DL BEe 45qp:rD .r坵7 n ^t"yn0c/n GO_PDi qC&D؊#?QcEqhc{0"0</uUh}. ԝ`rNv/u!ׅnW/ꔥ\F)f@.Et8!N*#sM9k+1m#$}avN^q{d w8^i毧x"Hx8p7bq}zEviF>= }9PVD'.}r=uJ9`V:dͰ_,yX7nEOo!~Xr1#o9^R՟R,U2UkqJxNk} )*yh5+`%(Y-ł%(&Y~qٳҙ tXMz_0E]̆6y55վ|_?i3mu 5L=&+?YuQ/xw_ksPmzB&W\KOB\?l37YcE}]^/,Y_M6ۗɇIbmz7}9c=)ɬ8]_?_7WۯI)&9p=7{a0y"dԇo`x3R_~t#u-yJ}T#]>t|=Yzt_;V'6?o?<2p-]F (g3?!ǘdꖒHh$x;yxI~H55xWs.n|.ek N:yXj|#jݵCѮe7҆&4  r;5Z&.{wTe~[ט"UU$>47[juewnm9Kքԉ.'ykvF|0ec%ޡAHɤ֒ OՒ 픎$jቶO$wKZ_[&- >sI23eHNP _T: BwI-Q.*&HˉzD.^Ef/m+ꍪO/5^!y: ^Vmޭ%lʼ[IuGM1֢ G$ ӓ_*ӧ^V΄>s"s%ENpWee>ʔvK~s550K $N hX1;/hr^EǥqT/QD賟vtUSjbS-˲6;,0GQes(̰=FpP(@JwlÃ>]׀ļ#R"%!9%̰4N+$QtXE*IaG|3CXFrl1˜RO$fpj+OR96DPf=? 7#SCmM0#/@GUxJJ Հ=I3Kgn>?p,F0`IP5QHGX.Sm67vwڹYᗂcXVY]ἴ.ʼ,8fQ:sefx^9?|_b<0K+K/ʟ,MӪ1Qozޣ]NrKn/-ՎtjSz}=(\:ùim*O.y3|R;,WE*6J:lXTgީ;$nގM)cu_߫I]'4.RnRJ_`50iEKYe{WxGg0quZ39_\B1 MeO7ɦV 9MTWku׎ Eբ_.Odž& ;mT|YTDxkߥt >ڴXMφ$94eJN?`-o3NXe[, l[e.,Gp\d +~9==;Mcm*U1e\L z7&ȡv~-M̏Q Y[4v *%har%5N?V%U|}=YFyr1{ Hjyi4$?r-3oCYr@H)_p|+"C11[6F7j5Y7W6z+_N]X\uֱ V񺺊0 =V'άn2wg.=3g)V(8Ꮀ %Xf]BOؚ "aAR# 1;#wtr6smVW 6mR$ l'8gfoo=rly.wߪd[n[QE˟M*vE/Z/CDa4-]bRХlrń|^>Uy{C>h4 }ĬB2V{dź6 /4crC]0m|@libD J:wE~zrXsu=춋޻8Nў#+:z}uJW5!VpڝI^8ll{V `ŔrS8Hxs3AC Ut$-v\Ďex -j!i8B2hKF%:(g-0J X*QRrcN!F/$CZ,Cׁ4뢲y'=C~a*o8۶G_1KK.fPVY(ŸљTG4Fcs a=;+mf@S1\J2I)mi#sОPr>?Q*GEŌԄMZˮ`dcf$~ϖﻇyjr+إ|\˴u_ ._8]$h&: UUiC C& rZzrPbk-m`g/d%t26u$XNZg_6V${09'v\-s,ltJm?0Dɂi-Ѡ/AD1YD8!b+ɿVd[: [_5օOEC.XHV>9CB̭"S]\p09ao1BǎD\gQRLDl -d1FrLMǒC@C1Rx6XU>B;pG)8|b:2gb bS"OZ{ix|o+ՑxI=Fɮ\sQO\Z*Lޏ[t&+Hqj d^ҀQ(N)ec`cWy~ 3U&V35AYF,0.ApS㾢x.+_WPc6{pų:̪}`boy1^k[^\`^ QWQr>;j\,Ǩ)MnYr).'~+IsYgܒs.o~/f_ 0!}lz;fM-l4ѵk+cF+UdžMMv~\}EgV_~Sl:jsu0}fyuqv [hZE#ьF. 
qb65mݻ~}4Y=ۓÜ M+[ـdcti}42za4S>%pQ?HNTT@2]~"I>ZʒpکhGG2e)\ 02фoLb֙BVǙmw4_L~#EBSr F"M~fpm9 :@ƅ8ŔcwMigd#F%j,Z$3T_h5dszdKP?̺FTD0#-nVUDrzZKе!KU&KG 9Ubm]'fbOO/P =v`><:g)g/;w'446@tC&Z042N(Q;QNvO #ԄQ$jumJ4 l2Ŝ= '9'% bŮxf6!i`kQȷ m 0IF1*ƮNvRσeInnzLQ! Odl sǵv/,I0i*Y]c21cbe-* !m52$jdX-dES!8y,(lUiIh^..:#;{e#+fvs4}٫? ~:mKcJ,d5VЀKD2slC}^7+rƳ;}OޗS?c~aOxa`kXUOi>%qk#o ZdWrv!G?1_OM4IZxFvu$!w=:h6n\n 4TC{G;u|m~\xU8ɃQr:SˈH*ſ:}BD;,9K[d` 53m34 s|~?'l5VcjV;v[Tvv 5,Z\Ƶ$uabO:@\y-WVUFԂ+R;X% W_  qK|~WJ_\S~pOG0 \2vz +*Wq*puRh~V+kl-bƎ+VՄĕF+ \\#Uf fڊ>ܤv$z(72'olU _][TCAO jfvXޚnR?c'R#aJ WYǪdՂiVwY9[UIQjprWqE* W+뽳59'W,Zpj蟈J)턫 ^U+La5bԂ+Z;O{v+/ŚMvzدkP?SWrI򯛓~+f]P 4ˋQ­Ӯj}˲leR|1~+cOVu g\ Nfvsߺfj5Aj@QR#xt6>eN:)V{thdE[ ,׳@rfkԺ?gfz =ĭspE:՚?g&83WVYkzL;'o/NUj?q49"\YW;RMϙW5AqY.Vj=Wҋiq[ la3h(\\U3ju`Jp=ݑWo?U{>0O-qpkKS+jW֌WҪ W+Vpł-T+dԢ;X W+`FP` 8,תZpEj=XW J1h~gB;x ntϋb;UUrӤUGJ0}6^8ŶT+kqYU*"'&\Y=;y@jc#C * RV+[O8v\JCĕFJW,ةjpr\Zoǎ+Rp .wz?x*cspծ]/2#Kբ\ur}-bfbcK!>Yp+•NW,ZpjƎ+V9 Wς+5zpErAT+Vź+V 3x2@ >0?;)?{3z _0b/,BD5%z S+x^&tz|M_ue,Ts—`Q*aIQۤj1}FmqBj\Q/fԨkYF^05v5zVc=&)'h=ڭ 젚 L½*9[V̴!r"F';(Hz\S-A~*qeG+ {t?z)Ǝ+Vĕ MM j+VFo]ʱp,B.1ZS4?̗X Xֵh?SAʃ`* :ij;L՗+Go$J}'⪟}%>Rƅ+W~ծ]/E+ ~rԂ+V*XTͺp,RV],wO՚UZ?q~ޭ{U H8Vq*V`+isxݮ _^yaI;szeۋpqL//ś- W0m@8h x XLX{\#Gson`x,}8}߿O3q}Y =S=_fO=>;C>A|o.n\O3oeAC|Mƴwk"8p-E:qSogIw7!6y1/߼L"l+(֮ɶ*t :`TbEpDig 8,qY'~x37ZM/GmeV- !)oN.X\ s!J"Rp&%,rhzl^oesSyٌ?6u|#n䓛lvwŽ^C Ϩ{WtˇF{8pWY/?gW{v}c-f`6.w|4('e؋OjS[SbI.%EK0IVطd+&yሡ_m#(FR6VȗdQbأi6IB%J2VW6Tce4syjaxa4JɄ` uP Or,l=Ł=1T\ &?_{@T媢ZD&D& TEblY/6[#:/+OcL~ccpഡQi 4p8 bl [, 슩D/1vF1y.Δm56 | 59y z/X6W{zgy r`sv^7S]D/j!ѻ A'2"oqY!|Xry]"q6?~>b^;L.xޯV7ND.g?߮Բh5[ A8mwˇ?[.]`:7Kz议.>=\zu~qFLlo_jdg,Z}9\/߬'GCWǛ1R1I,py-*߿,YXeQ5O^{*ٓV\GzZԴyQe/g#-j~ bo9TKB9]\ ZYs*b%mل֬M5&׸9f3%%SO{izU9,ГG}6q'NhԤ?~#0ͲE]-<~~k~4h{NpU].'WGcCOeL;#Oj /עS0XokiupJ6/p 8vޅ0"n75eq6AǶ6I S 'S/,#7 I6F[n/" d{"DV#Oe|"Y49i()6yLFڄGɾXM$j-ɤcJ+馀8ibK1lr>i7H͜g>~yH5ӉO8Ǘ>ߎܝ@m̈́ @"ӎk>Jb kH:Ue5x#wcw7;poכHĤ(V9]BHþ4:ҪԄZeqķRZJgC5CSd0 qWb-G_#g"Rc#hP|SP9kSOf28buٓڣyǩFF/ )wk HLlRE:2k2*FFwɩMbu>$M% fH MRUvj5*W}#hõ΋iGH_)l(5A&S-1 ZR}ΦEg|q:$UcR3q ќ'StT'Dxi m\9s,TMRZ׮Fb. 
O\ ;|rU9jo19Wfӕٛ2uq3^I_\(Wׁ}wiY?il_]4Ũ"w*gqS}_N=T_r=X4͟]qWDnn"D$y-|39jݵ%Fbԟ59 Gh]?e("7 05\8g4F󋂸ٳy 28Ӽ ~Ɵ_yh~ {COď3ZLcd?]\RmU7S*%(;ۜDr_2ޭ&i;?>vQ._T3e2.{s9:"e\ \%٫5x1ij5L6hTltԾN11I5z}E!iI\n!b>K`iAqP}s(,1FJY%ϑ1 zZ^ 1Z1!TGkP!_j\DoV)E7[a2Y9JR-$\cc&g̘Ʀ!c6]ؘuk%kj9sVUB)F| x49QߨZLáMؼ Qȣa[rU7P:jJa0 .1;+&QeM,o94;٬0ZIm=D@?Ä##6'U\PW7.+3l zڲ# ^!E$%U^4>M&PL^s[K9UJrVQ9\ V0ب# cadF7W|'R`sGZGHc5"DLH˛zC/ҵ;clyX92lr6ª(1US*6f| "7FL>y/R.Ac `ԑ%EM#hdGo /٥$jS2,ϠV +]ߥٌi,ܢ@%Lv+,*T0:B 8q 3uq񣺁rT˃Z\C  y28Fl5eiX⚲c؇84JF#5-uy!p&HWd:@X8pigMW%ɃJhhtU@sc $8$;G5:/cȃ $!2 #(|%i3]6=h\=E" 1|IGPVhݚ7^`n+=e,TɂNU~d}*yu1ǝUm+dəjf8x:g>hC$*sp]/4LG`n4۠k@)@zg D=QM 9hD;ܱ-:t*$ВvN$Q9HOə%[hH"}Q<$W}acm4\gTỉ2* ITT пڐtbRZNP g S+ ~R_!ud1s خ'}{3 o2 ޵z/Cp h0ClS Dy70P7kfK3`SB%XtC]H`!{7Мu 6+}VEK^h.!fR (ytA0%xEAw kkj06QVH࿀`W8d; 'g.\ֽmDX 3 > kX!p !KqT F\h \Ֆ‚`@Sumh1IGGb 9Qo," T{jRAW2e<;J&98"ɕ*RQh ԵްՂo{eK*:^A>׋7!%1;Uh HT p naPR$-|Cz/D>%(nQ1zc|a840j"A),(i(!e3)kMbxAPiP/Q*tCBquUD`m cԬoR#4$_uA%Z}l(Xi5&B8r=,]-튀LVi);6jpLm.nL #,/Fԉ*2hzT^FC2h90&^Pe]nvEȂ^?o|jN)񠃸L z=GσR*<译6yWF$ 1a~s R~^gՋMIQi@Q,@ `7M%Ij Ңo}9(f|p5r9XKy:}?vуdGx0&& r[}H; k&6Zlt$NF!: h<̡]^OC` ~s@߾_z鮲(:?@ϋ~8Cm2U# 5t=j^ j43{)Ep \ Fp5\jW#Fp5\jW#Fp5\jW#Fp5\jW#Fp5\jW#Fp5\zNW;M\ɀAZ'\ ~jE]4Y'>zU$H")RJOo0~O^hOӫ/@VO z~)YcDht6&G͹!L5xK)2u`zhQ\uySJ/'_ނgIoz`Iz}|_~Z4.@1+]KBޣ9]_Z݋VO-}E"w}ӉA AT("uD#r\G:"uD#r\G:"uD#r\G:"uD#r\G:"uD#r\G:"uD#r !A` ׫dʂWi7 n)rJi*\9&#'ducWr9Lt )#SˋMM%BqDx02,heN8=;*MVq7f*[X,$5"X X X X X X X X X X X X X X X X X X X X X X X^OSb ; מ`ꎞb*Y,J6gx5gHpT$S@_ AjR &y8*:n *{&diQ1BdR>qø*k 4mljjT >oQ"!8{rT,tAڗ;d؇ [Jp짟I\~ COfYyGkEHi8ݣ _G^Z\λ>aztlo!w6ċ<ґK_]^/-RcҭW&к{ښ,Zs<{uݬa=]В@K\wkx|Η=oܽɎVOwM,r~|ٞ8yjs9cKs;nN1na\c< x<c*f,/aPY:Y:Y:Y:Y:Y:Y:Y:Y:Y:Y:Y:Y:Y:Y:Y:Y:Y:Y:Y:Y:Y:Y:tOS;kl'gCR{fJ29Hq0qZYP)| RDަx&9|+9ZXڟ}v9Ƚ24"v~g}"PƊRrv,4(D BDJCst (߳AG@ʞ Ú*i{MxZ|N16'kb*P'hJp ֐ఙ8{Fkmz4򝤿@lUIMlr5}]m;u+}qVSiʦ殰+4t6L=rxŬ0hn) 7H3qA&x\s`#ת@\hLa6 .V{-RVв& 77q|Mtˑ&z:JЌ%;0cE5Vth Mܿz F 'd: {6ӏñ_z $U`Y \`wkq+h(/͞FNt݋ܮ<{9!Vxo `!Mr\EO}"&ΧB*3/)r) l9}ܻ\,F~Q::OVs4Lm^$tkqr5Jy'^>USy !Ax;4 rM~/xr=8Sݾv*hԘ?֑[ m1o=nC{EO<-錠igˎ}F</Gcuޢ\Q^Fp\,HZkБ_bmܸtSK}񛵯&;̴2ly4 |%˔Ff-<  Ez(:gF-53Z Ît{_i狓=9(A4>?4ifңvv# >[  ,z@~B )"\qK$q_WBHfa\{Usk5p!9hdB&㣑%hF= ZnCּl$*XRݽ,f]oP/lT=Q[ʮ KpS+BZlBU5HI"I"$x$DQV:ʴY#J6 W7Pv&t fmuzĬɝǒ3|=,!>XPv2}U\NJ+鱓U)7x9moN>YQgRጔ U4HRkE' ZZ=&5S `un'wRcF7{yT>Rʻbf&QHS EI;TFeF9nt;tLA w`j;xvwśL/֦ )Lyޓ[6%r\DQ 9\v(U(]"!0J)gY4B];Q1bz _4[Ye&dNPjJSHq9u(/,+%ں "tLI\hb厹d 6Zru40 JkE 4gאj>s7xZ3ƟfǷhvs2_uMS;^j}ƟHS}$KM(<̚@9XEt9U+ `T\adWKQQ<p/#Uqš xEl %sQ*‚.@tj-gcaQX'v[¦7Gz׵ٳ- =&ԓ֑0<. 4!K-xFd6ycqt2asY2\$Z]|YigcV'%CH9[o ڊhsW;r\:9PLם(51lg"e{2w| p-&LFypku}Jfpǩ/zJ:;iw\}|EG8\t5˃3'܏:sf#Y3`+9o|a_@^6<~ vo9; s\6۷חHU;ݳfx35˷Y55J iK-,0GQes('"`p 4BFRT `+sa&TI^%")9,"QdEwZI%pD0_'cix= ½ P&骣/gmkp(#)Db F[Ai!ZnAE`(?ڟjkچ<y)(tD92 SBDm\dV2#3Tc=I;Kcvi?p pRZq`{$eQh՜c*ujW/ǚq: YȡgC6YoP\:o_]ӻ/_}p_.*PqQui--5p\<P;znJidN엻OMo}}Z[M#0 t^b;?$}q.C%@cB5;Ik縟&yWUy0Wɓ~R*棜4\</V>`łOb,eTWNs/?Qޗzo˟.~n|F_~Hǽ+@cPL}5άX_~e{$'n  ?^,1Y㯿n.d2gQ$HR?6-C|?Kpxk{5tZ<+[ߕ_̀؀e, a`D8y Y# d~_+g\ &ȡYϻ8_MJ`^Ca)mQנт;;&4Яe&ՃM+opTV+(4-8|PG-f+C{F]1Y bϸt:9$Ssb`]27+}=OL%&3w7X0xch `tJE`7Gg SB[P:paQ+Jfu QckL ,5FA6>{ꃎZ ;jwtr6FF >HQX ydKeJ:K\ZR,f}R+I'M4w9i[w^ftZtrZVu.Cq"ZKq}Rrlks~5-Qix`,(3xn23Ńtpi {fg!! 
,g&z 2H 逆9Q,B$"MlUO `cJ#68R$#?g)Oc.*5Xb-IEtJHQ>qъUUw7UXnͱӎty0h)/>mA9 3 5WHr -%& N9ja#y2!=p4eǨ#N¸D1;k%&₲vrDˁL Rbl4γdz7Oӌɇt) *x"TuE"A<V\XPuB)MUJ BǼ;2o}&M5hX lt_0lisVod($zF`г)J#,OwA])/mQUX>qU么+ĩe'Hq5׊>#q@O%B<qU'/ZtsWlzvqK$+ .>z$E\=2>dCۋm'vA}: ;1Hm >\lUb-$O]\%( ((Iw}UXg#G\UN\} 8EX;*О al(_B5kc6~7w} c Ax{ 4A :`o,'B=}'+ !lOs?'Ŭr7BW5[B݀`g?j4唾Y'KmnlsQ7.hxM0 ~EV)#{Da܀BG_by sX(XX[]WkS6+R!Nğ.O%3H`SKM) 9b`ÅtȝtP#_ JqTQܦt:%q, Ƹ#Z)"k/D4M5p6gSYBVe2 +v(vCҪēcX':BjHwGJ |Tc>ٝNCx͓]uW)PϳFŚrBl:_,CsDgEFIC 1αΑ|fBig_>oky0oTf{Os >xcyN8Q9=ׄ_pJ FN[]~Gm[ -G1Wl{,X׌6]+Pe^'YiQ uH*E -$X`T9F$㑅H #RSFDD b@7<+SnY3n -+(qhc{AX^ގzU@O{ ,幱L>Z딕ǮYx"GB9>"&'Sģs`Dj gHBAc)'2qJRVa S CY1c`t1DP `JKDzgkli9s}Q{EA/%~Ja;5Vd{ESǓp'`7.L rnڅzx;)U[ջ\ x>j~k"5~` Zкjw9otf%Yt-shWԺ}Aϳƛ;Lr==$ou:9oC`ilW~R~vY9qՅY'biӯn>'[WW8k󔶻Qg?^JToXnm~$\0l9JշInꇱձ:T<ބhxyVwqg.NjG9twkb0Щ`$]UHH۽nVR& 1A}t TQ͍QmQ'(/$ϜOo*maT6&:3adHqՂǕV=zE^fהQ4u"g"rQZmNg _A|ORn'XV[mlNjFeJ?:{XD k#R:)U€aJ:I,hi-Cu+¥&LTZFe)A@ -8z]dI#[eIll9Y#].%gg$$KC SPT{#X-kQrYn[f9:@r9W44-I`S@KJX뜓I6@{) |l+P/J`4#nmt/l ԴYGZY3 ^ hL[']WC0W 0wy5=a]PvX!x;IMlTX D1PmQG┵w'+xB Org*D!qqBYPMX"fAb%>pwIL;"xքqg̏w+zrS.>^ .sqlà# +.ʤlZ|k|-4fLS4ZpP<Â#v8"D$p$  ))LY퉵:xc6Fv ?D҇ TL$Аc 5A)@ 9 qD d`ug78B8F/?l9Pwm̲*̅c? _7Es 3:&5A:I#1C(1B6\3E ǃzFo_BSM4l&";LZap_ ]r6Vy #s5s޲G1RB+TS 3&2bn2_Tuz)ƚypF)9`AITteAkh"tJ%0JhO6nܭSXl5ic[O3,|X, a[VEX[4S9ơ>b-JE'w@ epG5oE3 y 1B'1Ĭ)\*q)XF$"8DrQ:M}M*ݬht[fYjyXd|;"Qtg\x9( X|-R,U2U8_SHuOeO]xʩFȮQ թ|1a?J(~y]Aٳҙ5Sxm~=sߛ'%+x嗗kT֮@ A-4P1Q^Zr38)>Ч? B,W\ꨏTw ?/} Ÿ'!RhXko ԋ?/o^dZU f`Ϯ1;E}y86L6d>y KqEk:'Xn?OȦjHB7 o2i >{N2`#o2[:[(]tb,CDYV \7ˌ&,5١#dvﲃӳj[ zCb.M'a_uS.Q}"8 K 37sU]}Ų9Wz#XA'+76H@I 1jMzNG ƺԫ*q(;}1F kꯑM\HP;=hB(-a!Ԝ:i9x8ZܑPFP&:DǍ95JZ(LL;E+ݝgUS"}$v |cN31鼧O\ D3= UЀ>{yk:Rk1$LP.Hr Z8$$GJ[;"ʩHSk7?v5n62o͛3l"C7! . .j \QxM6C2t#͏9C5ECFͻY&g.&dfW HRz`X cPDyF@k+Кp8빱MT&{ZĜ+7(OMh;%JbEBS;Ȁ "s!%. \oH%L1 !<ļIB=Wyo(dd N=LF8QKea X.Zy Ǎ:>6[yirIs %wXTZ/ K[YJ&ždR*F4NScpruP!"eQMn 8 ƺ=Z}L*ŏ>1%n:4T dRjo5n(1 i Gʁk"7̨ug7*ΨL~raxy,ƍmC)‘ROF偠-{U> ^GzX(eMe񌴷]G.HfE| Ԃ.*$iQxurISEOyF3Qw}plyr.NVo!,?.~͵q#W`R(` ;K $!#xF9ᰒ܆&B c`-'1Rf3Wvud _ۚJU]פֿ^+V1 M>f d*g k=Rڛ@2a)5 >r|TS2m~\⚛*_a|D^o뜝-3U@nHҕn#bBxS @J8-loHלs\U4=D6v@ Sg"4DVwxr;0*8 'Q5RcrOm\ fXNI"BtqM:bBH3*!K#z[Hఀ *ߙv3瑉;1| Kгǭ[ !i5lݵu#gu{e;m Աj="Ne*+O#Ȏ^Z@"'_lT!kժvi>ښeD@9 !͜'z[stߗr[ߝ{BP 9㚠W!/_h\\.ߝ_*bCf6.m}H>O#Hy}mBpS,Eڣ!Kf)ơBφxMmw,=z{Q\j7Ň7w _vDm?h;kmy/G YbC@W(P1&-}ẻRu6{#3>)CRBnE]qwWK??)kO"$#Ek-Z|xSNr^#sC_,n[nr4Xzg1~ڹ>ܿ:BݍTw| TYsxߕP7APw.c\G2 UDl:5uج<+oP:pJ f=fSfAZ+tW`g \YWy2WlY W \kOu|A7RpjR7֡9bXؕ6a4!Mv OCp%zr)k?>)/r/mq|_72^b hyP !Y[ȈTrXR1YgW3'V Zn/mlf(1c4'%|X=>;}nљONNCc,aߗ˳ RΎ밬uj#k߆m?է;1.v{̀?=%wc.CI=ʹ{a;8P@`>ywӕ1RjZjZaRvUc!Su˷G繕O(x"iqtpN^re^:⬣M>dXfSUs))IngT7hB:'6w3=6 g˓tyON%YۿN W,9NM޼Eh=pݗ t(+[|cE2,3%`4i-,j&)7u2.سizѫsE٘8mXeUFb4at>Q .(\,!fPWwMn<ǧd)ә)J&b űahtD@A ,Ǻ/Ah2&O2u>B67FpUbCE/Ϸ#։JE.s!Z+QGկpU^RdBB5XfypMʽ80&57>d XN+/ /H1[-82!"SPhkГX969v^R2QGw=׾2W"tg.2 Y @p}kh_]7;HØzw{}XP ?|y>5 p.`PPZհcH;{/ޖvGpvGewTu৾;ڬDwGQBM>];4u-(_z>@HCp ‹iHF4Ks]'B蓼{ =$PGrM)*9!&U ):ۜLu8l-CݪiD خRDF wΥcdٲڻm7s^n!:26(ܯ]iW~b4ۣ ޿J m1sm|!ok*U%v:z0bi!1FǾ;Q{fx0${j.YT!(V@U}ʌ&RtF.0C Q 䢒E]BrjC茇yGQـq*m|쉈""gD'h6 EFǒCpƇb@y򬵐,rGUBd-vED@)$ڸQ-f&]bR%)Hc͜Û": .ZgQ/.θ3.θx߈Wʢ~| ‚4ZR}.VyQigYnŧũaѱ/rg+V1 M>f drL@zUk5^<&)+|wٖB>nX^/jc2^;pϹ= u|o56epZ&يjI,QGs!sV@hLY υBÄraDVGc\`.D}JiY+?Rp͚b9% }+h DT1 \Bu@\VV(dUD9̅>! g.'Dh JZ=~!uYC5z2]isG+2Z`cF<+Zd? F$lpfh$$(¢:UJlQ="NAHJ ij%HՄsP*qiL<%Dc>$%&MŠ@36m7ZUg$Y]C1sqɜfǂM{MWĖm3yMI?MC.c[:z?ؕ#&]+] 7p$?^T+W#3XnIBd*juTT7xź%u۵Xk[Old'@{C׭ thq+YZo\.CďԸ]ۆ{L,c05 ,£jnDh: Q 9^I8LjDMma `I)O`qeU^\C2ׁ bsʩ. 
HN$w$HJw."!uFgUrC=a,"Z)d*xaBI%]DvAgڴEy8q_t6p\Gў"z/h I 0H.[CNX B8AVE# J%FնK֪s rAփ$`F[ИɧUD #QmyT`) 9ncxcE;7?`<,yt -g,ck y$H3< bgb] x$ltNHZ9!ԪY~_|%瘡`l؞6ic= ;פ9j}rJ͙"Vڦɣ\3%9y 07]уG?wUXW_PgQ]uByVH췶9pZc0yQL 5K2@_X#3;OSK8Y:"aA'; ޭUe9m-;y-UW_X- PiyWbȁ7ꁪf3Hf2 ƥ=g^fՍȤe)BziWwfAd t4h(>r 7`OQrqn(ghz sR!5(O+# .nD,k[YhgK e.[qi$Rx%1δq],JEnEQK%\:fz1UARd|wg, gqb ˅F5ǎRaI":GG}{IW=-wS w 5 E<`TؘHqJHȥ$9mTvc, X歼4`ɻv)RlOoQڮ xhXHD+it;I1z1BZFD:ӎ&S=<_Qȃbw*ca  88FbuQLNfw $+>x:tEqݳx$HcPb4!rpV"+⍋89efz"I/duٴhmVkEm#^{%[PiWf/*WCL7L&CmD`O)E&,V.pO SW7q8-u4_ zN1E'3?ݠ^>?.~?,Z~h'X-BP g0Fau^a,+T䗡ϊ U._$4)^/x_[PYMʲg 5A+J9X.oJT0իOߦcIO kJx~P%KOT--qsKk#UJ*Up# ]~24~?j4.Ť v|>Pŵe*Gs3Ώ6/<&$` KCDb$dRkɅ'jIvJGMxx"s a10C-@aLT0) s n I"z8hx;׎Ky^lvJdF/=>ZhzwWRE~RwcnLc.jt=# eޘV΄>s"s%EdC}btny)=_7Kw0EXS$r@ĀsLV!Ut\G%28Ed.w+rv͍MTaI)5-a,^h}W۳awð,h̲eL8w`R%;G!yp@M hR z˜ _uxt",i-wD@4:@ DP3âҌ;$fD0P (O3@2D; =P;D.*Kf=O]kQ-FS<`2f85 •H VL "(d0ߋ?\'w42<<䥠r04"j"_c)D"XxDjw"dO8)-8 z0Xǽ@r eQj!% 9T(jw͌ٶOǺdfW8+EY}60fA:sˋ0Oo^yz__=<͋Td&KQ ;uѪ1Qg~xޡ\NlrC/fG:)=~~z:{cp>׻T _N^:?Hepx%^_+Vc خ ( 5 'JDLA"mbvRbb+t@HmojRz\Tkn+Xe[Zn,W!9"m W Vwzq!X{kxi#]lh ]dmYGc1МP]98KyVKD:ȧDɣ/޸dvv!R,̤ӑ(#YXU*p|`mx: %ǡ'@ǡ>ELO[DA0<׻8ӟ2[VoΔ1NB_4Ln'½u3tVtU\0\vy+8*yf]ߊK?厍NQywvC7`>P󻝇vʼ5 vw{gbZDv"^Jɼ8[yNGP#3S C/'3V֐۵W_l\QP3rRb2[ܷ19-Gmˑr,( E;nQG.qc4!cw2KLe%m7V8q4| ? W~̒_׼6[¢M1?_:b8tDPJ$^$0Ҋ +qJiR^~ m'wfl YkrdžEΗůY߶˸cdRƸc¸)P2ݺc#9LdhxsGLe)nq}ŧ 2<Y,f!Ynu '1beP_"'K{kQ9$6i9T]i`C*8ʝ7\ 0t> @1  aJ7ڕn\=u1|e.]X5_:Nڌ/<݅A~VFA >}7~eݦ/i!^0a9{/<="fK`JҸC5 Gl~ZIj~roe1EZ3&v2o;Ͻ,h>gfDՑ?V0#(6cFL]obQ#+P;G.\Y'Tڕh2&u,FDV7Byډi(Y{ɱ6$Q3-9˜R 3A !Q%eBhDTLlQsL覘 ?gob'&,? t)oyF(gLORb"r$gM'՝JzBt 50Ų1th CRߙ"H4\BW -=]%tut%Ia1X'1ոtPVFJn]`v*%)t2QwJ(j Jk0VDW BW7fUUBZWCWtǡ7H F/ep?~pr|Pe{mС'| 0F1tbJh);]%tutET51tRJhOxJ(h !h]1$ѦUB{0ѽPR"[F]eU͢ n?fJ!Jjb{kСDDXU5Z"NW %U-]]!]Jh \{fgPUB{P(X&B7\EBW"QwJ(vZgKW+.Ւ0Xb;Q4faמѠs2YUkR&StiS;x~ڳs^#i;Qxj-+KI'_Q>m* Wڝ^~M7Jb7_$z7* ~5~FĻDywbN ƌ&Sf]1h~<{.q^6"$N2aHam@V:)U+PRI'K%=MYyNE|1 }`x;t#ALtw.bE؄ A2 ǨU1kQRk h8z<[ IBƥ? %Xe\٥q%w\~µZvxgt0|qaL6(C~+0??v.|0oBs,.J9.[۰Ҏ^H7TnU>|J &1R9q4G֨(AKMOPPV:AU ;-Q1䄥 ^*m4UiB[1J9$JT)?||3g5йh|3s8'SwN޸+}z> &M.Y'2" M&c(L pWKFo$p: qh/+W/>/(vr˗:0E!xZ6Q>I{¼I'wQZ1BTFدXv NKtf61h.>;_#Fku{{vpgm/ R}Tw@V-4g_ؑADe L?2t8Ź>V8TKa^CpPGփWʼ4N()bA@T(TC$X`T9F$㑅H>HԔ1тPg+RD++c=_ ÿ}0=@|켱ZL'wEٚwnUO~U<`Pup$6°H1J"\hJ@P46ịUh4BkME@͝QFGRFL328z0G$n )5v~>槶a K2`SF,/dYRFqNAps4$:Y3c|i p?ck9c -GEr0}DLO$&<-΁12! 
j`% #+;e&$(:o^eƌL"ˈiL[@@)F+-:28;Z7V`2z~6nlo`?$p<ߡrIW:<[f_z ^a Kgvx><>{0I^u b6۟JgECwƯTZn{v{oB Jf3iݽm]͋¯W>KܵCK%C267ݩh.2nA G_xf,E'[69o讛`W e2ݥ_F~۽`T_ZR<9: 3t>R7&h{sgJ'2Zt %oϖt ./Y u{]$k]qbuax!2N΋(n;rs' 4g˖1T-ci!LV=M &~T!^+k)L@yM>jKiJĨ6xΨjgB3IŹtz%v,mSh[OӅ0VYUNYgBdb1}fsU6U4jsRZ\GNY Yw#*%!` v ;伊K$^ 6Xu9eK6msʞ'OeF3Eg̛ƹ(9 aS>*b, hR 9K1|IBqbٞGE^G*#R"%1v2J@fXTqXŒAy׷hܾHa; 6=ɺcm-ab1)r1éV׹;[vtQoev|@g|= ˚3|F[M%8 U(uCۥU#^}Nyo^*\ɠ8,EVn|5TnUjv)U1 .JiNyȜufFצzUm?GWοt WNolsN(;hHVu_,Lrkj/1+ |1}|uRs(ͱyg*/ osHr!A$ѥ}:mo#yy !vȝ(ĭ^V leF >,ciງXi~Gu"Ѝ뻱1MlD:5[ARty1aCگ,u]Ǫq}&Zq|تLze#3WS]MU䢹z[[Wֻ56᜹~Yzo*I(ե떋ר}?eq 9BKor@  5oM] }} yBEbj0Cmr^E&u*S[}I^;0*>ŝI6Q0$Y$eXô٨GQ `Eg13J{< Xxr-*gH2: G"%1A+J(5,e2Nǒ1qlyi=wWydYtZcVu!CO2rzx6 ]5_n}0VEm 'AQqʺC!fE?1;x"]A,$ E/1,SkTAC5a0cMw3O,cfGĠpP ̈9fyh`i?'bi99vYGJftګ] QL8kD(Z^8l>0~F8j)1a#{4H$[4%221e'lGTHTA҇*&hI4A3CMP3@p&e64B,Yַ5qvGU|Fj8F1VfoF2*mS<9ϛ0c`R^TA!(P#1ZHw>'[p%JPUz{&M6p~/ٱׁc98+V슗ƾohe=G(G8l*߿T@ yxw-8:"E"RHxb ')9!s*yVoO>y;)(A 4=>X$Z2dhu:% Rpc]+G3HyyyQ?_fq~rXR7ͶlaL'28iZ7бY┞;EU~o#Ө'$ (/}hEG/`gI~Rm.FP[Y!:F06k$*Jw,HHˬz̪դtL{cwKY~uq}mOʻIx[# J:Y`T3\x(9R"X2y&"XIqC  d"RKrjv2`Ytx%3met`=wN6']zU%/Ƭ۰ԕKY4qv?YQADjÙ7XMJhi8%Һ`%$8:iebvg4\z6BO< y4 h"|r:`)X[ESbN;oGɫ>P CV'wc[qڰOQ{RL$:RBdiFfDsBi2آҫ#QJYWM :{W !CQԶgw%-KmxVG&32, @Г`4(&iEL6chE`AG`0Jkwgn$7w1mJPgNga?i&<q7/++?)^s@ PJr\ї?B*٫sxjOI\,Z`Q X Si0Xr9>4ZY\YkMȖPcP㛓QE;J C&.g3-ͱn ]Bގx<]4wQM鈖Hoo!Ypu9B{HMbǡs6Ds"4.rX%t,6,w)]>>,NQwh[=|"`sF3L e22$#wx8gY`;E^We/rB>9nq V1`$,/6K1(O:F vӻ[{zz8y{h=V-5Rn8J}Q'o'u'4p*t~u>]ZދCXmi4U/X_E^i¦þF $X8>L\oypƝb}bѦ8?^TɼSQRk.e=q_:&>$H-^l(. : m\<ԍPBZA=6#$qZmٳnMݞ,Q^rgz6^TD w) Dž2JqDi͐.]+6 Pkw$6jpuSTB+D˴\/G5ͤZ#Bi[>guiNxfZ%kkIPE QSʽN윳W\p HNarDTN] qZ-\$DadNǎH}#>?4ky>A^(LJГ.p'7u,by#gy䋚ҢVY gT$w!- 2x0AgDhmeҨVWEq.ƢphY$ vO5ZksU舒vlNk,>Ciזew;zw5tB9To7?qXי֜Ļ11Qnb~Iz:cRȗLGւǙ la#yTFtCRRgu~>6:}`Is %h`qcQQk=pKc}q#91/GMPҜ_8&'U87>r Ir67}S**߁ wV3S8 E8BRF%M)-.zB8ORBr$/qz^  .tJ`U Jn)`Tؚ8wkViH[[#c_,-cT3s4[7+ݛu'rs ;>#BLIБs'OP9SQ)t$!IM<6Cvl`;NnM.G$LP }; D02Wٍn4 K.池vkc_6-P`xIx8Hc)P#A{)`k"N;'IP<)})zVZЋ<*Ě,2 G(`s~ KTֶ+~FCX'"68!"%"v }6 VgxZ%b d%WA2Hւ&4"o3IRQq5D%Ur_<Ȋ%bk U !w9PPbK޷5m~zsuj#'ڟJZΆeX0JޭՕ#Xxgpk/mmP>ٲPO # Ku$X].o?./8!dō'9n^ lB-$k |2n2h2b V919>?NM*T&bKZvTnZIТ E{Y}r׀YWbH"!b# vww+.9ALE~p%=v+B/bq%*"ɊȎp(׏w%r+Q`bgL:\ݒWN:E\y44#ʂgA D-~AR)5qő v5#ʂ\x\{Uz5jqpwlzy}|j`-#-0c[Oڷ5zX0U0d{ ~b~l'\8ru+B7{D&\ # j| ZveXI2\Qg;fͥSF1fԜvJgAn$hԲ`Բ&h@鍠uM-r]7ClQ S Ǽf -s&!MD<3 yf Eh S&L"W+|7eQkqxh'\ [eTGD.^p%jWR+puP+\\R\ZƎ+QiՄĕʚpłW"b4wW>(Eǧ+uGUѿ>Lapvlpw=ۣ$^4~W~t2p5L%, p&\F)#\`cl7N7LñJTpuʄd"Xv+{ԆUNW\x'+Pt+xWXD a)~ѭO e g-[W^n2 n0-r=iV>E 'iƒW,RWv^%t '\ H:•rgyg'rbX!8QQNWH̅ ~AB7b~BB5͈~{^[`y>MalrlyϳRb&ho}~pOB;_Ols]ַluEemPˋJ,k|Yr(˫6ߺdЋY9tY?"߼[ߠW|޼-BB.PPRY&[nvx&X>` O5]z'}ٽwov{ܳeif/6ŇZܿWxbT]߱aeQT_0H>z|>zQ-[8 {9r#7Axxd{OeႷ yrolſ޽y 5hqUޖt1Ic VE5Qc=ɓ.QJ}=R{[_]~v[x/—߭s1oW~x M:+tu Q9 @C\{=ߗD@jU 4WZ8(TL.4vjHysi3 z3 M,RjS Bnst(Dl,\袉M%MI;%s#pj!xfs׍MٴV{t!CVՇS !٘!^˙ZYb!JT4qّYRcٚLaP&Rb(OrCsr6<E 2ˊ(_J V gHF+ƈ *~o˘EV|dg-&uHl)RtSZaeX*AKʚTM Дcm9*%9­ ^5D\ :Et![}!yvk7Akc} .GA?r]=FnUܦ6ph wH SfB,AmT@_[HȐXOֺHL (A*Fm:.f E3!VE1Q,eb&JlE c*VpКJ7CQǪ9oT*.%v`TFE2Pf*mb hj. 
&23%-~j)A"cJ Ry}NG&Nd-99jV0R(OwKG՞΅C5o*}]}T^ *H\psa}Ԑ&{:-n*j$u}fm@R[6- _,܌֯x`XX%M&Gl ܴzD,c.咾yJl7޹-w. -cS?jYaձ [+[eh)L(e5Sri$ 8JO) y0F䆒{˖#j`]L|N+"r XJxF(584nj2t5 Lh٘,[  )J3VsFUR@&+ahpXVvBTzel\fsˍ#9U_'S9 :Z2QQ8zҒ40@{5s@_,D.IM*cdè VVRZ CE"W*[(mӈta1xad0jIxYڇ,@@!`!KKYhv/Qó^@]n\"S8A3Wht:zAJ9C?,X>\6S,A.<VcO.9g2ӆ|9r_?Ffj{f08,5b j'9TOJ4rvV5U6D*Gq.{!QmZHwv>O:@*UH{uA{ !N! a9K!4chQjF S^m_ZS1fBGṔYVhS.QR@.3x.`.P @J*l庉Dе)$+&8(MnV{/yUL~=w.TfmA^Usfof2go@V R:+|·Yȳ?S"t>O[I/Y9ƲRrXRtǡ+[h(rk_cG@2s1XfrOO@& \{@}뀫֕^/l=X.1v%(uub"P_H%]p7D"^,׮t_/2ѿϛ*G*Ӳ<4-;//.ަYxԸ.D>-hX@p hM5K̹ZۂlWN:+*=cVZNٻrcW dAd}yAI[dt[bɒudM_zEbdS  :gS/"A]I9+0z"$[+_YckBDGqkcֲQ"TpsThdbug*9-IKT̔Q-QX=P0]/;]}V$XLQKJC~ 7G 9g8by=Qt՝O#IRM4MZ@ڑ ڟWh.r1mMۻxzwy:4>u9h;q!&6(W>}=\  &isմȀwW/} ('3Xt##qHGGw$wZfh*_!mzqpUqdVڻϚ6]v]v=C9g\]_0 HTqRoخ>H8_5P\ 7Gzؾx $ +q^pZD3iEu"%`F愴N| Ώ>̯nc4B"iD&jiuB1ɭ2gd2 J3t`z"K^SU>RAҢxA&@rv|u}C9 '77WJ DTC{܋a!3d\syK(Pbob䂲ڤT` MT`Ha/4e5̻0'7chMAIo | a'JiZ0aLY4 Yj#*ױ5BzO '؊1/Ylh,bbS-֝5 1 XQDOAΈKǩD@(DIZ+\_p‰*!*rBU:n )jl 9z*ny\q_" hT;j>%,0Zlvg.rrGBzegr AiK#>W)1]9[(Wy檨>܌*yyl_As͋ݜ4p탐YS%=64=Z1%&dJ):a8nY!J '592S{0R8v%t6;$ A3H2F ]t0GbdԉA(njɣf7hgzuJ296$Ф !g\ڑ Fq([{|8Nf_.4Q+'Cr,9*.srRWBsXO)"6P -PyCEB7ȥz&Wϟqv; $jvWqIU@Vєi a([y9E9%ay{-C3u =9+S4M-P}(sңbDWV z Jӫ\/.K=Q(՞kGnPAA?ane4"-1 ձg[wx4Q%&vYUd5{J)L>Z&!.^Hujhkޓ|c|ql!?Wh10àMIU *a{"^$GZWUgwv2*~y+_?|K2\M?=M?qI/+!$C%$2~ K UmzC.W\4+y)xvD9QA*} 1O3{Ob\  |.?]R.3'p|o?E\uFTŒa[d/|8p[ԞioG.EkR~6CPF"VՓ>z=C+K6+8|[XzuCrO7%loF#e>t[ -v iD+}nN JrlaorJ㵜Aɮ{fEޤ-ӻ{wU}X{ \9&[P`0Rv"֘Al)tJ)|Ÿ')e}TFr7 h.[#HOPc(31T\7HBomnzgDyR!FS'ޘ* zn9Ke}T(ߔ(-xQ_z5Uޓx󀳏89e6^=z͟?8b ɋcyz8`x6^lo#7mY d A^[B{sAYk/h^ Э:ߞЯkקn lOq-ﶷ4RFPVp`=ifV]oF#56mˌ{jyX4oZU;B")eX5G*V#k?Sh~ͶѾ?1^J k?Zd LtRiҹTG&T2`'[߯O;vȒD+!!IK5SVw.C{ 4j:cNԙY?/?1o_c>|O3*$i :[N5XWv d]y~6ffށjwl# nFڃ;|P?}0x5,//9m``sު~uhAK?n뜇Bk;Nw9 F/ >d~-M)_!\ oʛnz CVa`ݱV ǰ/ T@1f0(fcR HoGS.0 &NCєCVhxp _pŖ,#M.L2q٬7.yIZ޵Vz׆s8m=[<%Һ 2عa^jr˯?]nyѺsvnFSswrŻK6[Z LQz"f EJyEBu%\ӌ %$`A {p=;dP.R̛rb4 &QBs^(Wy/h}Vs D}7^ERIQӡ$.hgYn8F1`'kSaN.CTi%vjv1Zkkuv_W*ߵpv19Y?Y9Y8Y9Y8Y$9Y8]Xn*Ci,.!1A!4U(+IPV( E ?FO.fT]\ͭjv1cdbN{ ~v1pvq5LS TT5Hjv?̿>P0 .j1-G!9Sq˹ % 6RERmuqy\3)X\ʤ$O4@K4 q[J,h"5\g"*GŮܒ1Un8ocW$>7uM*)cJi=Zm!poŐWtď#% >`Pk;w]Y/Jt8@MhYeX9mJy*ER+&Z3Xt##C]HGG.Y^]2im  ZjKL %'ƢQd5CߦP P# 8bao~5}SzZ8Ykv\lwӷT~?[lLg@ #^( #%Y2\s=VjRfN2Kd eѢ;2n-(uq#e_. )],%$vq*06c{CJ83>ƂeY| ~FWbأ;ǗF+F Cdvum[:sp^z,]Ǔ7TlVePDb}q{Eo5i{V۞uzy~|R?r=W+1:#ﳘ̓N@ 't^WFfFf拢Jn<@#>-@{բv9۸l"TU6 iӕ.P&]-~J(w[.' 7 ӾZ\"s t{Q7ӠЈñAZ}q;+oxb^(NcWyȝ"0JmN^yv/d:O;4k_ =B+.=0$VapacsT%׼m%ͩ0lFup1as$6ZafTP͕KjV1T~Ŏqf al0X) bĥ:F\2!WBkc)#Njr!;FlkBwScH)ٮ`tՎM3CICOAw6K)(Vc;fzm,;=Ň[tJç@LP,+)wMR;d/NH(rɎ=W 0ԫ([_)v+lj9G}#Sj}Ɋ A0f ۜlN57Цͩ[9EleۥpRgT6թ>_R.V;zRRnSv$ΆsJm { ,اɍO|Lۢ,~7]=«}4/1r1}H?[oGZ:}övKtv8~S]1e~/vj'BzݬBڑg.!2JuSnT1Xu<-Gk<Ѫ֭ y""Sqlͺ1Rp@A u;-rJͺOukCBT+ձLnqXP acۙIi9iDL%䙋hL1r7[7])>u Š脎QǺVZ9jo-xU[E4H:׬nT1X^yެ[DZ6$䙋hL }7[7!"fA u;-S3 hUֆlͺ Ď-*:FvB߹[DZ6$䙋hLQ=F[7ZnT:ֹ]`TN[ E4HJm5zݺ ~nTZǨ0/^|VZg.!2%8Zuk1Y -?}jI91MMk;ucl̗mUB] TR{̭mhFϹ"X<צ&(&Cl)pي9[j's֩:v5].j.\4j'kNpyjg{wiE1scs1 SMc/ƘCd>1f!1c̭jмs1f!51c̭js1fc9ĘA\YNJs1 u.,@c1ܪ&p:c21cnU^Y(d1sJy ]1 1cnU݋1kbcnU4֪{1fME1s@^A`1s9^|Hc1ܪ&HcJs1 Ř%BZs1 a:cXcjѢs1fe1c̭jcs1f1c̭j`Y}@1c̭jݫcȎ1cަ L{轃 Ma2/$_iY@s&O Q}f8k>ˑERP[%,5.9ئRc3J $N)qq8RցϘQ˜9Cv^=?+`)jRe a.ܤg% yܤO^< '`EF|ֿ_l_.~gK5g"ͳ̨L:O'dO``X#._jj~r7xd?b§{?!4pӨT}-k𨊒}-?gC0_Y /_!q:[!tY~?a\Kt T2+͹m6Ks)k!nQVlS@na$(9.@Vt)i95Edjذq4Ljj@Abx3hA쬗Lql ,!6;Y;`Brp2O%!T> ?i:[&A$?W@IxX_}"t2q3KG [*5`5̯[L??Q>|cz5~/ש^[vt`&a{ĚӼnb՟lB;lɎ!/d<C`rՀ/4xl ~8-d|_f6sB[,((Nb?^]ލkrŪ2On&c'߄y'ZXc``aaZCkGՋV}+Qez3oogҠSy0?Ɠw}T˭\貊\3'xJP knR)8TgLTN+5K1e,|8yXVB-d8JN̜7f4< {:M>z7gPOC8HCoyɬ%U6kdgk? 
E)xN0ϙ0S'bLi8w` bAi"< uBhI(̐\"NléUif!ĥ [erI PC\sf5'ePZMQ 6WASD;(miԂP"}uQA`*#Iq!1dmi"2P!DxrBgE/)~ ?-N{^>|^snRN&~Hgw{&_++3$i2X-c:B ˊR"O©G4΁/t{{ |}ZX\9gҤL)7΁5,O<`sqﳃ ~vl_'糛sn/~x4rfvwR{}|fM5ƼJ+oia )'%<2@`x0):SK>Ljn6!uQĿKkqJVR[dn)%FIRXPcJc_X0n;Ǚ K9Њ,39lg<$u*`%*b웤WrXyLZfp,ϼR gȳY.4>U2Phvn4pM}"\aV.KϗI ] btbMQ^>cOK|sr~v^! x)@i d!U/ɇloWWw6IwK~FyOn9X\W ep1@~Z,ϨA>&1.ɧH0HϽ# ns*s9`PD6wAOB~aR7MOēT/ON1߾h0!F11rL.7L3S$AqΗ2!do17$O "j1Zݑ1ؑҾWWnVʞk2%=׃(ʃ @%ඕCW\} @UZKJZn0eHA0(SΜIL9ér)A$VY&/a ^߯X`#%f*'!iX'rdk4T i&AEnL{ZZNdǣ'gSʉAϜ7aoR@w `)8{B~qx?z@3Pb[`=3VAC%!/^H샻~>}aTb-񚩖oSOݦ^ oSIW GY_Rs)y)cWiBRZݟ=H>u|'-* (wxW%J \聟`z~6,+}69'^K{YN]>vFĀh<%K+e.sVdVXLs$9lټ-"-Cz, Xfܪ_?H(6+kk?HW]>ȇK%&ՠd Ƅ|9PfԘZa"#)X9R E"!ZT:i򕢕+U>P͕DY0 Qζ|ga5$IB/ lQygؙў^yű(iHm{0}#X,b aYEFDFFD ?V)8G{jַx%A18pPXdi 01**)pEj0QξTJNec(}tZΨR*M0,LrQ6|'nrF{x,(2"nsN'aC5iS݄bEu$L=aT?x׏]B+uIh Efbv}߲3kVk3Qtr2,hf`[xL9n}JQ[0`6}q}6+ =c=weF`l4kU cGhl(TBt_;%MXG(]ᬧestza/>As/UU_IQNc B| hǛå|(ղs8( t2GQe""E"x,0 m~K+l1}X5_.W ZY[-ëڠϪ.( 0hW<ؘJ.Yup\3Zŏ:s)[:u8%-snkG-ccѹx 8h"Y8\µǢ0t&Oc\Q۝IY.El\5ȸ5;t̆ i"ye.LMO#!!$o@.IH9bJgy]c TKF;<-~v_5adRޙdjZ>(֋lۦ&! ?8`:\Z+N~"$Y n]'xIZiR0!70rUo7K ǓO4=ߦKNsD0`b2W%k;pA1* 1q򵅙,i,:}YXmaU&겠MGk̆JcLbǸN BijΘ!:L㑈eHf;_ >|Y+vJ_wM끁-#SCgj KuM&w5z /(}5tTk^,FO`5{8Iv`9H"c1v:= cXw<;KIqO;{ 5P 5Pr*Tw 0F10p ~kq;Gx̭,"|r/j;ˏh$bSWMMG|)j8 u.k-@[_x!D~!PWMn%%특 kks/֠> +T~P4ch5>w(mZ>֛c\~!H8%:vMD %~.\rԋƎD͡*t3IG1t3IGLaN8ʈm9%`*5AKýt(pJ"H]bdQZ& ѡaӗ-|txKSKbSj\u:uxuf&RĤ$faoW!Q.9HF` R1U܁tbcfNy܌}T:)M仧0nP k]h } +-n-h6iM(nZGMkΠ>BT9(V.* 9%dE,nӗ/Рy(9 KI<A^jFqW3QՌʻ Az!.TȠ6#bVQpG@b2N<`ߨ3. _L"+x3 O6 ijGkuC+Э`cP|xvTWwnlxv%~y#h(8՘bmR)B\-L1WLIx꣫DQ@0ט0A\8acf91簷pkC9!:I*I)xAeTsI-Pq%krĬZ]<Ҍ}lEHS,v1u2aה.{3q lZhȞ~6lDڲb1%4xًK'=A[9hLsX6h bEbZO ~!GP$jȠ ' ȋF. >vט 6TUAi2HP4Bf+hwX{X&qn2/R,x)fs.58("V~h))\/~^Ͳ TwoP b캰M P(y5i 6.Bᩫ9"J cɹ,1췵ZxVE;5Pŏ5TנKRP\6kn|knV=g+-d;e}Văeu*/"*+R\/+K}Z=UR6`|vuTC5u&Vg^8%L'j}lW^;_c@_5&29C@(+.,.Pq< DlWq`x ?{s~3<_N?F ۜc2f)oϭu@k2+b:m^Lfqo/M'f9n D"v<3fl$@rl.//ukgURkJ X7[mNL`ܘU[L~q Ȇo;FVtor?^OA2*Bi2؀L4!C=XS4dv"eg%CU!a-iҋ!/n+yW*kB[-ǫ:jGrY(dW\cHΚg]3Y }z4.E R㎫/d 1f\@u< qBkqq`Y-2OS^3NgFtr8K.6Ӂ  T9/xHQ s8u}#@J_-͵Ti{LDz9 ))dw5m&|fA][o#7+_{$x)ξdf'yɁ&gd?Ŗdn[}lkLb[.~U,YU-pjE2vh(.LL3|Xyy+\g$87(MjiKt})VL+ʽu_cCӏA]$W5Fo4}Bv&z<4WFjp4GC% oNXWZN$ }t8I ʆOu6|L˅:4ca:ʘ4:oo@Nt¸-vVs;W%-?˃9;ݔQ>6U l&5Aؒ~r+~2HeB^uԱ9S2/KyrAO+ < 0A|ҰO VON:ubL eZ!x4Xpw%;ݕ%6ŠMe.@ۚ\~TN `~NToNv7$4NUC'9::o9g]N4Txu3%{Vi5h4: wQSWCicw ȀrZF1CC-s0FU+TQB#=3W@PJcSGP=쒗v!>( J[*iAFk/ØL3[0/2\Sn( 30yNb0X6y&TB"7@& +-8J\?."o9h[2—75g~lWEmTpDYӪPʎ:WwBQdNY-Y}l?ySܡ+܊3j'g؅ghXc;Dqy4MM]!doj;Z!A֨e+&x$XfV&蜔烡xP<,C̜& jpq8m˜&p E2`XTLX<-砙 `ԉ0(4on<:h ͭMЎ8ПWo(Z\ L9 yaW[rU# vRs/c6rI~8CJ?iA0~廯Y<1J~ŏ??>R|E+Oj}uI{ڴgF?|d:WLJO~'byͻ?[G :\tq33#Nz4"2YWϗ=ϰTA]*)c{^V}[ta._Սp,4oO (ޤ2uN1wKt\J.x6H,m36V/j3AwgZ[RY0yw ?^- lH~~$gﮧ>cխa{3ľQ#(%dBee 1 TVCNJ8 $&G90j3ٌ"3 Y! 
P#y挱 Uh+M.,Z;BkkfVUL[o~52t߫2Zzdzŝ-5s;[d:2OXqZk6Џ8&[ eF́eӹ7&81y$]*yi~'gj:1ZmxMQA֫(rL"QWrT2f%GZ$"59,ڨIτV SnRk3h8X+9: PqFA^ߛ5aMQF0`)U0E1)׫#jmu,Q3E#[KGKTR*kQ$,%GYL$(Dihq?'+Z0x:g%2\ƳYIsufقeH BvJS\NqZ(r*su6}֌V"ޯN#DsPvcۛ#%Ɗ\44v.4:F) #!tg0/!<޻n7g.Zr 7S?{ϔ&YpMFޮç'U/w@l-!AՆoObLWMTW}[P Ij04EbizF^{/[UX _neM51`6o= ;;._mOFqt5Fb5۞ZrvҨ:[(RͿ/պZVј|}c}Rni%ZʑjL%Rj0'2wN,ȼnqh5Ạ]<3.V\?sp1V~;f]ćw~R~Bh,0…G.8gg\e>~d+i1geA@hf=1+O+U'd!oDٔ_׼U;Eݚbb:MQǻ0"jݛwkήڰ7nMl }Ĺ,; i:퐻kͻ5*ѻa!oDWl$ڔqE8렬)2F"S9d>'3I9Wm("LN!6ET[CaH0ܾWqƁ8hk.DA繡V6Zy%$':(DہԊbbș-<Ӑe7rqk% t+In:H܌@1\jCJ&M5k`z )s )Li-_o 3#2g*eF eY`0^(÷zphH`ZQbBGrԊHR^Kx-` F+|'@$& i w.VFШ\zW熅ʲ"3D;O2SVl ZcN`KOHEP Rcm]AyˊxlAWM2EPdN)yZ"0,6y nj]iDΛ(*GmuSmY1(Vʏ(WE1FjU"ʐNKpC:ZƤHB2**Q(-XeE6b`,linx&WWoӭax2gH9}n SvԖʒ,Dz2(ͮuEj:Z@VY*>Ư;ʟƵw)ZKaɳ>LO9SpcEvpSGBˠ> qӮk]]v\cbA2a\KN+I+Z$ 9bR+qEMg]h+FXe#*=1݁`lsYVMyx Ɲi~wfВ5z7?%ؚɨg4zݡ\JmD=y*ʾnD@#NU_Ukf5_jTB=nJ޿Ԋi˧X)yEOJ:#sHuOn6yfFS[ {&pɅ] "o,Yڋcm <|N/%F[s>KE]`V0K 0+k2T.H-,u7Bjr M-2CxXEe,X:r #Хjm;eê䣠cnAmwԢc>X(F:U6t-X7F6U;iݻYݚbb:MQǻ.VGݚs%z6,䍛h#DAǺZnVYyޭ!IܢRyƗz,䍛MDRdSʠOu6|Lũ7}e^F82'. ?^QGefs)OŒ,ˎ%!iXoMlu5jww_ IK9LC/(RtV RRWuxPr XN qˌU"'{o,8 z cJ2< s Qѧh,AlL䚗v kUV7Fa[j۬nncK $v6! UG$Vt\i8^<ەr㴏׃z+n< 2 -8fȵT<#V2+ͬ\z|>/Lpou%R=Mg]hxFciͿih7?+NAIawM@ )!߳g+['kkn$'g}C_T/gR]^;~IWYJP^?!))R˵lxA@40A+hN!BY6脔ѨЉV=YA ZGi70b^xU`9;B!IB`53[덴3iF3ՊIx!ո&P W%rP:$|W2"w*6T2!Ah獎mFU&܀\Nr[%j3Jճ:3ŽC)y$wf.{h|Q,蜃9<*&0m❾+&B#Yz%e+xtzsa B흵IxƩ|.16i*h5ɠag QS2}ɶ2f]{fw=l#Мy89m#jEmyc#N{ Sra}XR0}Zѫ*O}Mp 5 ._> kC,Zxj@R:X\Tc:(̇Fh f/)_NZ$Bs7P8*ُd nqBP]hN}!Z]T1ꌹ|hC){.BV5sj $=A|T#͟bQg9s)/@7spi&}}$-B_s99R={^W*Ϧ Zd 3\:eN_u:KXÝw_$gוn{Z@k@ӾYF 2F=DaΜYB7 `.J"=GS&s!tӈg@GCG ߵ:j69$R}TQ-/\c#,-O$ٵ=<3QaR'fRF<~.S눽4_}4кuʊGqݛ20VrN,N,S"]:Χ[VZ9k4Jìl6oJ.ܖH^ւddfrpoq(K)\K\w.%XQ9`uW-?ŢpPVbEqz=eͽ&FDǤN$4W?376Z dbp?Eu<B4|;oدX4論5nmtׅyY`8Bx5۵oڋw< ؜iB$>}6[aO&SQB#M9TBa=>Rjycm͎#*3쿦xǫ¶joM#0 婡zu_ӲdͳP!@@18 W@8 6#Ƭҙ͙Jݦ+ج+laHPTn6sBٶQߓ|ur{;q|NBR)SB0AY$N{"EZӁ{cK "0UlvkD]>l}'elpjꯊS*Ϳ/Ѽvࣝm_f%l'3}|pYc%w }5yX5yXjb?/@ZeQrcThAt҉F*P%Q8(% 2-yqI~AuUAeǰ4R&?s0~ϮjFQ,j2 {)ZWnwyii<ʦ(ƣi\$W"ne j DKF p! Hb<0Ni"mJ>ł i*zӞ]T:][hkTBݠZXGYAmdFO "j e)#QhCPWZd,܁`:.K9rhh,w\DO$j"u)s Bj ?}jVjAv}P[HЍ9HHc,)Kggl3(IōQ>G+߻JrW]i\̻//>=X<^ *zf%eYe~|r7[?ы׷GXm.6DyL6}}ҢrYT3wowW~׏77L623S#V&UKI9 [P{fjUC{fK0:`u(QUT=3Vk]¥KHJ>e 3$jpLyέ Z>gҡkJ./6k[(kh#9V:k(5;#bB`4Լq&c&)}ҫG[P3Ĭ= %8!ЗQF#dkJHô3,Dy 3!Cxri@@6YQEՊ| 2=.cQC[n0e*0>-6@5!`ͤJ-%$78E]+bl~n [6R7wW4+6>9AƨGTҳ\@~gz4WW!VFv2 d0O\fo=%OcC3_ T6 x:hPNo~YXXBkV QTP0`Z }q86lo4|:9>X Yp+FK?~Kρgmݯrb%Z絫ʯh! 
B%IUXb4SJG O_VT7-t"_ԩ uj(e @CK%pj\>Fah1%hYS+49Wk%~A 2=EaH7<$MSlEY ㋲Lqmp%X.Vhq?#m vÉ!C9n "%[Ɣ(j *AdjΞ1o  \,{v()WBJ%*jZ dt|bk"Զm*^]0 NDKge` pK(() +Q'd:Ü̴nSfLMPI*:$H/!*(Ph z,0s2X.EBbfLZyPi J;D7RDSʊE48/~]b%f f$'F^(=Z4*c2=DmhJX&j(g v$"L+ǥL`ɄHO Ri5 2b3JaF ZeSg-TJO,@XϞ/ZB4&Fhfr5KU=l I3N3TJQ,)P~$ y[(&@-fJ' (ًI#J˕]{ÚRJP )ȿP-z< A5 G-~i5 qC)ӹ&y#u!%”fShMqg% c!geq3"~Op>wȴ{5#STiG'[dDՌMP5hP?񻏍$5CƩҪ/xpj.n'&0I'lt*G]r\>W9;kѼ];zp{0 ą233DJblū̊۝L}p Sulb'L~eZ3YLWv)ų*/'YozfY{Bv]gAOž~=a'b5R զpHzjVl̊iVvۢ0ʋa(/Qu1Toz)DX P.K+xDT)Ϣ W1*EQ|''}b[k7;|r5WLjmp|QHp:JQ|$ n8E6!6*u6&+X Y1\5 0=-T+}QmFi?~7|91_zs?L_1ы̿8Nm$5ԅɳ; GP閣 #[vYUd`U>u Nlp?sNmEMBm;v{4&~]Kl7u{{>Pۭ7p|=ΣM6ҵ{[QȨ/wDU_&w(k39R)oq7_ta; cUE;/I.Oaytd}4j_T{?_ xC|;i~eloUan^4 pA8׫md%n5jҽ|%toY q|=~?\ +&=YHOqsdmmHdjƻU]ںKjs- Iɥ߯1ᐴ0ۑDݍ~l]vk_u'$M\o>ʺ- gn։8[^]^]4/Wޕ|*nM}i d(% 1bOCe6G-񄔆b\܋Eq'IzOfӀG3x>"WYWPk/m:f@sÙ^+u}!Wug4e4ec,x-"||HOݖɇpMe֒}óy#YtI*ղ2L#6oq\1V͎<3OJ3N)d?D2z·J>9iNP+D^~e꺼Rz|EieV*jJHl T"#RC:opxhc3jz Ԝ<_0`ƉZ\_pQ],B!J-E24Bgj"g=GIHC\hݜ^.Xn{&^- DHզ sք= ϲZO:ye02x2YJfvdL(t}(ZBE.3$M>C|nL.Ӝ@ zAX *5cI<L+5&bh\kL lX㴰{>*QTu+N)[ A8;FqϦS H Bq+Ѵ Qp]:6ڃVۻh /n3NK͵<1,V+ԯEu2ԁPbNA?Β$h,3͌Z1bO0o+0?`švGB?vlSdجz7~>dXNף 4tHMxUfai>ϿuE}X7qrtL+_M)cO_!%vʦLԏmGogt&f1K"ǻ78-*֤T6?.JkZ]i6= pswz5"x<8TJևDAܗ5Qۯm?,Xۂe4aN]ڽnys9W_ujJd.mXG1]-zeim_/\ӷmClWUi->lPUwBq# ~p|-b E)[#Z)e`ଈ3mB3~. .d4W|~oYH!^ٖA(29]+)]Žs/a5$b;gҰ40ZBp :D[+=k<{T9zt4cYSgh3iDŽ=t3m h͉8`?L?#*6J(6UW:Kdz퍉2$|Dj21i$YLnykƜr^QeUCO[oӗDK2FMӉ O? (a/ Qگ_%iJ~UlrWD*ΆyjvP)2  #%RGd?4?TT$\qR$:.t9Z "ROw*՚ѩp -$}.@aЛ! JUǦj`$"ʈTtns,\Oo5̲DZ~zgyVFhAU44E/3h&1 $ 6.7 >ӭ3ZTNFX51(=U2ďa@+#Iᩎ( f-ǃ4(k`RSChr*1 W Zlxc8SB2֐/%҂(Ln =!Tk"`S"R;$Z"7ఽ?tKuCҴwX2Hݝϔ pq8 o%ڻO3 a w@@JӂKҔ E1 2@4?|1+Tl7jIԨ%LJg+0AdC+л\P4oqNeD `7LbfgۛWp+wz-FQKL iFۻcC>"̖p\-boDY*_bU\RIߦ׌ٸ ؄GQTQWvtUKYgϮrW;Z1CpSxl6[T=F ee6JsUEw&~3 6nv>ٗ7dJ/|7If,/j&e*y!y;<ܾvx[_ 0R~sh罐a>z u﬿9й 4 h;ɂ1Dx] Qbz7tZ%Jxł#(H.cIJSmdt0qWAɓ&4oNl=|]܁@Fjnp'ʏШ}AkmV bUD>h(aK*Idft}b/}.j]zq˴˦59D.5Z&ÔM}<f*k.9?&be0 /W>0-~KE=k >}vHWzLJ~ 0͘䬗FC}a g%rTT5qGGjeO#!;"k<$ad* Bԥ)F_k+NIV52V0ݗo|$WI \Š^ߘFk8A)K2P$pFv(56*Ei1]Tu9^Tlc=sj"f3[q5'5 ARM*.Ԍ1ڠ:.H6=.DYtObm&gm1d̾&ȲZʗb5A-. !8RRh3M\8ͽP\4 $0 Vkj1dNJQGf5DPJQk[ϕ.2a,`qQ;< /:cj4Mg_$Z#|Uսow2ʏJUHax W  9S_Jĭn;<+Ԥ^.KCC_y#.qoi{,&EQJRxSiy~"sP=*_Z疅iB?{W] Tx)V %Y'X @l} [KZ=WV[GjI}.Me <c]IVQڸ.9w^j׎ؤ}8g~riyq1u07^B9hm]a44XwS<^МNc5e:Gl{ww#Qhct *ǦQ u ܏JIɤ1W붙ˤ\"[kx cd}2`^}J/ 6R?` Ҷ$:$ܮepFVTDJb􂚚t'16 QKɹÎ[fIH&G=_ h+Pe% .*.V>8`6 ؐ;C>e4[9y!y-rqџduƫcRlPU"jo4_cCq)8oR|&#Fl?E;["6eV jD!mcC,Be[໯'' nMm*H #Q!za[&X;:wƃ.@Aw,`3mWg*D6-;I lÝ1&`n4ɩ݀m<>K#7yn4o)g,FĔEۮƴwCvQf%uCѳvRBledЍeJ,ls0G0aˆ疷zW =W ,'\ Ur@4tAVUܸ $(h1Q{o?Nb;~3]G19YgJII]@98ײI% >aCɭj%[W!O CM"v%16q:~$-gm6_.JxEEHxD1A٢aKdF*^6ƶΗ?OO_b^aY< ֮_-ڴD<[JI!#ׇļU-?6;Z_{G9Sz;_4DEi3BMmZKI'12\U|}";TeTS![ rh=T`.E[)vxpwڍ-:)mC9 >  %M<` `5b5뗵 U ˡ?ʄI||@Xe*2 iJR-s-|Cӫ/ni^lRL4W/1+ܽPc ;1oRlDK¦7펖\Vu1z1[Eoe߾v?ƃw32m K\G$Ϸ;vd4>6R6^ H 6Y`i;tʿ锢IARb$s`Y:uz/,U3畉I>Ujo_۳׍oBJm1>_"~ @O-SrQX괘29gMgMML]~;Y'"JW*05,bHѿcC0CowЛ1JgbovU.s{ =KDo(g= p$Rz TXҷ Y3<83zюXH}'bvRF9)X_%=m/Ô }Kea4m ^$}}aCW]d pPdけDpʪwnwVPhcHԠ^G2BCu®e -bz]d5x:( otLP]; cl(ߺYSA>R(v``d9qoXś `{e}YŎC=V)nSxh>htr# HgnrjOG\VSЈ޵EQʼ^E\,1&(G,BH|ၶNfH޹$o>4$I0X@Q8jU:Z+Knh'"u|THy³yVU< 뢍j|Uҙ}T/ɬsb:[*6=됒=D1f~B S\chh˧l v\@Km|P<>*UتPm cSS/x!$v;@ںQ[0hRyDd|֏%n9g͎'ob c!'}ڜ#MD2C?޲.W?g1 /N=&\]hƜ_zscdǢX z`(('Q8Ma@9M'6 ^MiSf չ^cBl~^ $`O4"2RC<=|-U/N2u\Sωsdk+xEe%cT'_wSH >_!ݑ:o\z9I}yy>O~ / iy9% ǔ.`ン\OQwUu Ѻ2M6Q|m2do cl"}&C<zsB57ViՍtA(TiXɐza BPn֦nt4NͧYNm#M ߬vd)F1+˅r7 W҇CG@ӸA2z7ytWu@ؽFϧDO3pe<kRיL\Z|;߮fl-U5cŵ5r#^~c׎'|o! 
|-VB }=ga%Lp= 1FcC-mH-XiOȻ00uqo'bM̪PPZTQ; j v~sz`Ϫ^`C"͖<%&!;# .y>!A* LoVړmg9zw6nh1 7OݝeN|'M|!r5:#đ[~C*'4>bIXPG*%TXUj+M3n'|u |ɚ=+o6gtXPDVưPZUD ԓoCֳR ;:)Z֖ƫܻ==[/z_])͆ܛy> W;y8ygh+!N:zDf7}y|@ _1hĘGys{ MĽG!n'g/*QkVzv2q6U߿9^e:ZY` W;\po>#6=sVH(J=x`ۛn,\x3]A둖>p9 c<+]K,?zv}-߃sI#IǵLQ>x4gs,i$,o!um9tڃOaNUYT16:?bfo[z5q2"Ƴ*KCe`(Y,#m&N<|dF`=$׿\p< M:C޽q~aAm -N 8v>u+y/PQ&FbbX*X#7\n clp֭Bc;tqN[g.rt'gsa{`;XCIָ]uzTWjRGkju֏oge*qQ%XĦ,~s7Ch6(gUڀ1kq6uU~Fj׊ HcAMM:T>AqOT r`! ֥֗7MЖn*+Z:|DŽ֙ <`g T1Pm@Yީ1Z39ɂqe?m5:_J,=wrFG6RuzY oΔ:EhaI2U`z.i*4M<8z~#;#[Q[iZ"Aޒ < و/6aF?ݭ!m MpY#BjtDiZNĎ,X5 :Z"s9C ODL6B6(ZFH䘀?{C@{,cSiLpKb[ErFKQp ʒy݆jyPbe!DH-)mqM P/-E Vj .Yg. `@+$4umum|a!EZ4#{vrB1Xjf+:-AEV*ȝi͙v9N5gکLXs(wF?2%bDb`REmz}~7=-5蟶(p};;oN!s|vov?r4'0|wޛou0dؚ~: w95z9n{yk#ѯWyʍ:{u4$)Ҙl7~2]0!<"qqo3oshcL~*}5Fs?]P8*~^ +3}0~׽la3NZ_eAQͣs5yf>kM|~hEny:od Uu*s.LUZr0j|vq7Ƭ%ᨵyF&ݗ?H|(%& txf{Ψc;k.:0<?]<jX< 'kp>7qZ5#0+q,}GG /svbp ǣxe:8v`D]>|;<'H c{sfl xtA|iGx̓AO9imwy,_O?!!~҉Q nuCCgcn,~g4蟟1Agp]Lwl{xw"=7|\ OcSYOID6$n*5٘i#4|>rՓ\wduhF)}M3G8ū \hll0kVlX$e֧T:"$$c8Z%ւOɂ Π2>%QS q 3 a+X4{g^ Cr1HfcYDBFG> tSʪ&|Efiv]Ƒ\V9ӡ-id\L-ni"{{ ЈBƃ;)/X*5P|cѼm<{4v"Vwrͅ6gjiHcw[KZraݣ7S۹`0Qꄮ? z?/oq}탽@a&I{,}뛔 n{$y[9%\NύN|ea]\re ɟWi0u |=%^R=Ӑ'_]ajtfO9Y}lUG4$o.w;rqjH9(Z By D1(Oʓ(YϘG$ko$/2[kSpy[+Tn[0OR˪²7ee:Ԋ4VO3:fQ1LMfU6SȕSsQ1&[ejiMbmPxS"޼pGü*u!N:ZƇ j <_du Er+ < 'P,5Q V[Q 2wLQ\oBJY氐)TYBFpD>up^tH;hlڌցضJ $_18Ohx1ecn 9(0,@ 'J*eAG/WZyu_$Tf4H/tk$GOrd)GOrd=kf|,)\fµ⡏QV#q^ F\EC` X >aN0pz}ϗ}v^\cks}5-U&\+'L5oy*VD)t]LSpp}V j!XDG"xZ*dY:p g 2)X nHo1 *ZϷՄfYV%uzY#Rm跤ɑ @%ɐ(k+5aL!b1E&*3\*&sBYKd0(`,q#\HQPd$ɼ4MPg=xc9&SGƉĸpt3 Z:(2CQ`J+"e^1 0 fSR12.v#F ̩5hP`(NNX}LX.2(Jꖢ0M;Q&YԔ25eXFqWsej<ofԔ|ʴfMgj,VIIK$QX&"fW?z)) `tOMscR#?cL:颍>zJUm@F̾L9ʄ3`a)1P\`aRW+4!NUvTP 2;L L-ԄyLDYq*05lhY/LHU5Sa4yH%LےdΫBJ#ɥWU:e2JfPHja<[HɃL yF9.xZk5LHm2ΣG.ӒC4U{F`aI X XgL90PřpKz<-NHҙ\ lKcUޡ2Z(+ QK[~Q;ǁΧq'<~ǗUޜBe#%5^PG !p%aL(!L. h)HcX` ixo>Y\xWysNxZΚOY+4S$֚XkOZ BkFoz[iRDk|⋕&R^|&U&VZc۬45|7^[jOH%Cy:)g1!涔Az;ӋDO_ u,(M+L4noW,ZY2wv.S"K2pL-A\"Kԕ\ndfȆ;7XƔjoN\[m<GcB?$_vwfGDaY4ИnL`>Bl](`F:h,+n2U"#ߋݠJiۥڋV?/m7w:[~0VKہCuɽ}9Җ+8u)\C#V*QE %8KDlJP\ZˀNg!mMMھ&i{MM6iۤm'm0ăS(^)m2QN2RC"-.'pG)+ kϋq{cC{I=#H۽+%aB Vh1FFM݂0Jr0&\ qSK@[){Y-ޞRe&n}M\y&nmⶉe%6 m#6uڬTtAf 3)iY*hNݮ g~vo5P1(oNU(fC{sW q{b\7_)iM6n'p Ub53>~Z)p4-㐉Eۂg&g):9 o7A~o1p1pʉ>{ tr@ҹ;D OV&"RD2M$y kP.c%iHlhytPAI4tPAM5p:.EY9'Hџ]RpKY2PoD&dLiYO%Jh \fj.otJ;' w9 ]B޽>TFV)jҭ(oWT`Kf V{"Ye95x 02F|Q>'c]bN` UxR`'QA|J cшIDeZ9d2N\kNk l l.Շ'D#re&0kPԶsI#nYu |H/"Ig7A6%Q`q}dv.L%KPXN) \?]JV3}tѷت֏)ӬT,ŻO= >Zɓv<:# ǟU)5&{qt|SECu:ieS5԰ѳA `ΏAb亍{GP+--b>oY˾?'2TGf1k[#ϤE1+ f+"eYR@HI$W=l>&Ԓ?T`iܖ}M\yε\n\n\r0Udd Qr}M1ߖIA FJD4LJDO]>^M_Xe DG Dq/_ո]iо’D۔yY8)s>-(jHgB؅xB+I N2uc iڶiפm/?6m۴mӶM6m. 
&TQbކP{YicG&BaotWp2麰.XK ZrMt벇(|>̓e`dHIjHҍ.4j@yq;Xz$9xnh.iXy< p ݂sX+κ3{W..t&9+>:>Ɏ4QNʽtߧp6ԽF:b@+RI d]4P ZDl˂Gq䡌r-GrzIѶmѶm>\v9Jb`I"4-p:IMxd`e B*kO]=*n^Mv{s [lYqMB:1-¨-?{;39Ҿ1 LWRs%OB}1э_}d 9J{eʖ蚭fSvѸh%tz 8{Eo5JGx2Y="}kR܍y [ ^S3p }瓼=6o}d߂֡xJ9L"X+E*-xT |Zon{W/K`:1Y;F3h4F[ VMBw~_ܞ5c!G P(樌3E%!7{j7D_o"{ 2C -C2)CaAP eP eP>iwc{)vܧ+ӓd+FY2InO=r(zֆVvkQ?$c~A1'hø~ OgI*i/'f<1KmC{3e36nE({;Z4ǚeѿ?jvhfvIk&g͏,Üpe۩svl;] `XRB.ۺ<(|_ttFL#\ٙf>moU8zttpm~ GaDx{c:{h:7vZ!ko7o%RöiE~=:>{ulݩ+[w֝ŕ7KAAD-1Dɋ%bEZX S暇ZOGй}P;z=JW|eB/o/ּ~x-Rt}8ؽo?ƢhŽ,!YhXZ0!G> WW礫oX=O~ “ԟ"drZ#6IzC((0K;͋ 0,)_Wzi޼Q>sC؟~`mnBѹ\1gaeR^@eq| YA"trH0UPQmPr]rb''JL6ǘhPy}\rxmk7bLhxzy8ծafJ*LB";y'q"e'<9'20,1.N4xl8xH!:ZYp5t1%Q0Ft%Y;*BK őN/k;fW?8=bBWںR<3YhE]S)Emq:i^!ݍzήH3I&j,phda sU#Pj2 $SGA4yB2a/ Zb;iG M g*o2"/R+^9 Ģdƹ xM"D]zȞK^ M S)^,Me_xI4RZZ٫(k7oN!<'{bcB`4Z)@|*HþϒBNS&M0`/^%-9F u*9"F"D@ut4&ј/VrpX-ݣE/sgii(~ޙs.O5YmuQ:~hJwyzmX+HL|Frr2G~VNOǒK͸`7aӯ?q<*g+m_%&cA7~{tg!3xtϦ??f؃NlgDI?bd8_#Wmت͜ Wi99g~~+󮺩nP"͚y#Hv☶Fp˩H \!Je%)M+#/ "Nhg){N4D;$GaGEᝈWsƽMq[ |xH]ػ˒8T̎AȂ!$o령&%:TwڛF ch L*ͪz pfx؛7t$KF?}'pj`Lc*Q\0C~h.,Wk ϠZ i]&rµxgL A5x%T&@7Ҍk㮼'IZGNnͰY/;}Cit]}:NA_u9'Ÿd-U*4jԔ2MER<,%; OuZ[0*WzY CX7# >$ E/Θe `Ls1pu'*1Dx(FX<V8HKjL,x-9f޼Y"\#޿:+,$Ɍ'2ܦN\Y`$# Dn oO♤)"NT!x]Rtwgښ۸_aiw<ԩSu=9:f06cYT(NίbJ1n1f* ,}>7 dDžlsp> f1S$pL{p"yp@Ɗ9a7 JGNNʤؗ)$" Q3d0?Q  AfEd)-e?V XSn>)2mX1=Ȭd!TQYݳvQ5Exs_D2ҁx4pHҦz450aɲ%V,"tIÐEHIg1]Ԙh %ɑN ^O%0B*y~k`i,uF8鈐/:]DHFJ^ ,YPU>uΰgoݢgSHj,B.1n(W!+vX-ti.gW-OeV^Zu=-uS%T^3ʲQ7dvY(FL)AL%J/]zFy:z .p1P}H|VJ̬"L*{"B RЪZц""9f? -&zdZ\(U&#o@Vt/9Cb/nyu? 45UӟTsu7-`E}R:_`Yt' /uEgHL^! x`n횄:~ʖQ4MuGHmjʐrxi8hMD N܋\Q0"4<(l "c&v%5Gun9$c.y]_(<7E^<fdtU,2Ȗd`VqH30e2zKƌ[Rcɡ@/rHhДLjj5ݟ*cQTG %H T VӗJr8n`1(X0uK. RԚjArk*t>V1]ѱ8{%NUȺ8VhfװHi aqh2'pH&Si@8jQT:򞽵`:fi٠b;Q/Dݵ]h"ޙhmit036 e 1%iv)99 OT@h.+w -H> ef)'g-̕6\YƧ"ksj친kI[)@٤6lv05e׭'Jъ6|jyDRQjL0pZT"CNR㢾ǣn"#d`?9lQb,X.J[Ec(F\ cKzI$G/} Zr)gFoB̬"asCY):{tv*Q1y(F*ow*\ oKЇS(+A;ݛ*RNs&ll ,LSWz>n}UKφ劁y{X6%b˛XZl]fa]&wrgE^+}fӔ=h>'jW͙(XY?ɴ2s t h}2'Vf'^\$$s6hBlr)ꆪ˗&gժ7{VYw7EV3 M&~Ccwjn=׋kYE೾,#;-r~q?.֗Av{#`XEuR[WHbM(㼾IsQ_ U?EYg7~cVݳ _]ܼY]^WY6S7 ;cqNB^ɮک~v;WLoe16_!]I'6rV~oCDN;6vΕݓ"#n.*2)ɘ]c/_Ğ(fe߳;Wlf#7?ͪ6 y}q>Z;A|Gel^-܉ej}~s8t3M6d"^;~=m%mO+b-&)|b!W?bi'9F(9C$er}HG)2V DicP2flî{OoICdqbN q;ef<$j'G+PՇ<+E23}Aya.Шt:Olof<}X]61ʍ.l?,qj'i'1J{1JЗx6 ŏNdӰY"[nps\Z*{' Pd)yF<AtXc6?NMmd֝iN!HZk)-8\ PL$S(H_19~uS8 NfTOHϧ$]#PS؎05O:֓0;-QM-L'H;xjq}ÎH,N0ntfM;Wamfu02O:MO7cSzt5O̖3G~d]~rkYn@R,ooOMeѿX<˘*?ۿ}oau\oq\nuuv޽\.i!`nv7ߤ#y3RG[~9XS;yޝsYD)2!4XፇJ:怈%9+!(tļ JubEg<vOQDr Z-Stz@A5ȴ "WY3G{}؞Npwd(!2ByMf\YP!j+q+TXnWuY\>V:Hg_JF#/ؑRߌ85|_JfѮ<[?b)M^cϼ>pry+k;9 }AFZQ21:_xq!Ǩ}T>, g',;&>JN [h оlUgU5XjމSvvN"r& MߜI]a5[ϣnE`7KNԖg)^"prR(1uYR,$?y@WRʖJ%ku #!w:T>&ٸ$tϢ 䫎I:gb@bc^6XcEz{ Z&" Ͽ|z:&㎡šlOrL:]{۶*hO{V|?ܻrs4).bARTSjI|;쵼Oy%y-%o9CL=2xIw &7GPJm⎙8Vb1QҘMZP-H6nR Nz;dc*sNa"Ļ~$J Kͤp(O>pm!)9x+°ܾ}u%<ڲ E)oU y#%K([al!tXm&r϶?bt^9} 0N.5`CK f;cOFcNRɐx6&U)^3aD-mn`tFX}Z En~}p*m(M|€IE1BIfg NoW¢Kfh7D?^`*cOf>D{<|(ʅ;Ѝklg/$9 $g!,IIocu{*(LNQj J`i E! 
& Qz&Ǡt V&ރu`EuFof:"Xm?|Ӽhn,Mq~_!pYv@i~s!)~yzyӭs J?]p|Ûgfj 1 `R 0ݤ RF6U8I0r%)'Hb|y8=xw|*cUXwƊ66̔!̍郙RoHKSYo 0IO Jg]E@%4NXAfJn x XDqM,aƼЮ`&$v,auŨt9kyJNhZF,W߯$V*6=fd8b1O(2ϼѹ]~εҮJw/횜0)ͅ4 ˍ-(2mQE7bET׾8eIkR5pI][ [Qt6a,7fcjMml1c%R .M&՜i?/kR]{G EѰZbu`\KHy4  姥=yV4Kq.1Ejq'kG%8j+X"HppcTBl>(k1AL`:x#>#i S[dAcǪ;f.XrK#m_T-ids$e`uJEp[V'%Ň~CC1`0S"J$Hc9UB9U<UTqYmF- }s'mNgH;)=:fֺa<˾4@R 4 ?ʙR^Jx'2_wyW }37(ҥ$w(RK(|C{JQ}Z:^Bid{o9 W+{Cjy}PLHL2H2=S"':D)"q #nr5MD#JjpFRPRR G2$A"Ŏk1#D,_K.B+Z # ,`mS"Y*RJLp& "Xyim5K6@+S$< -Sԧ ߄J,Lga:> Yy:.'T2SĘ &#mB:;$*f6OAS@pq4M+'TPJu .P)Ӽ yĕ<~̾}GZ,!\w\Ozۥl:eh[U Qiɹ%dK[ٗoarQ[Xj>v 1($6>ε{mguRګ4.o}v%`1p$F6&ՀU0T=7 ԩQӂPzJU9s]IVqU96yf8,kHm4%b8 28*!&PîFPi$ݶ#۬",WT> ֵhj )T S7Z5~̤(kFWYXeVa6 ak (q2Hc5L)KN_- O^,v>/cĴM}> 010}q `HN9LƮFn4zj99Fy\y< F^STkfse9Jzi@>H LYul2G#Q8WzV"~~0aAIsfi)DK/r6l_?9mGOG2}bqt2Ooџ=Liti,.jZo"a@?\6~I>X{tpO\{C݇QaǷя75dAoxD,P.N???C+o&NŸOIos?-jq d ?!ޏ#PGO&6 #Yo=z=LK .=~?f_"˺Ofd/{te/^]\Kt?^/Aw04o^|9]2rBb`Z74P0FVv+O7 Y? 1 C׋|H-wQ&y5wSQ^5؀ݛ%'y[] x1>6BZ .[YwvMZ!y~8˳92wpZ@=u }3-&xrbfKiƫOz /INfyҘ)PUEX .ga* +ec,LbzDDp\N&`%E} J7ɦs߄ ?L1)]:5WrL}}5eaG豤097!:vs}B)y>v ө"ЫܽNMt 2ZyUPX@ҵC/.}<aw(XYL4Hj뿦v=RHjtL3uTNHPsCuV#42I͟lZ;{e?񆎬G[BhEV#}g0sgǎJ5Fگ:@-$_waf,ނU64Dk,ߴk K97{BWy֛ɳѧ.ƫҢ7c tVg9+.6 V+qb8揣yMw{L% ƺ=Xoma[m.؊cFuۘ5nf2+"5Y]De\ %8Xzh4!V,XEw59qy9Q;Uf(Yj08X>׆v'zC{|60 9d\/^zo.ll?Y.Ю\׺"X.֧!dj6h6Ϧxa:wON0]%:M]%=ӏnȝC@hphvinƣ4 %&_]&gigkk[Xv4ˀ-2DŽ3p ?6\č7}ڹ9l֟lLgBJh,<%aыF%A|neEUCݕ|`{Mzs4Ey"]b?{Ǎ,ۼ!bw0F$-lT]f5%=cxwxn$?*Fee?{}~ac2^bc=ii!c%ǥ$!c_..pI 6=¢JJ~(POs21z.{-k*L88O^;;En*P=ri&9\QkƞFwj2ye$:n$"T21jF=Vrg@Xhpʔ zQ mY)E5ϔ,S y%g1TK$cD啲ZZ+bo9j=W@X},MIK'p%NbӏKlQsQb04b~c~4~iP/&Sf>s 4D=gA +aVP @lp頥 FD^)ZBI'qD,YXqǪB@Y@ss%Kpv"9H4"fBr5>0 Ar!a!asXѬ ]'if!A1,$+&vXHs$ ɣm$6]"wXHhf1TH2lp~I|%!C^ϒA#FW=c-3\]ǏhDcy~ sCwSBd|\`u j q' ;z=zl9 TH)=A10p`%0#kq ,V~)йԂIn818ۈscd-gD^fblebt)ͬilM<כ*vwW }0cFZ_Je./ỳ'oOdSɶ[%W,>@X\˧$~pf?_% Ym֊d Ѐ,K'$PJǺhd)C2Bۧ.9V᳥8zxȆ`O=fF㑍)og(wq ʙ%fڰ x؀O9{ F;iCKy@kH^)gtHWw8L 6:wHmj0tGsPxRsxD:!kG^,0l pw"bx7PPPPuC5aY57D+U@TW\J`4rg}$]uJy%X5E*/*6%%~w4մk{Tb;rCDZ(?6@B`GE/C Y4T̠:ӱb3!*%\%ŬMClVHiMO=S=U| aT ^yԦ%q绫`@ еE!uinWrAR*xGJW&0gS3qf3&i f-Ajנn&t98xc\{*F&@Ӻ.Wyfa1İp¡ZnE¥if9H\ҴERIi^ajI&uVo.iceo?bA@A=KpTRJNd^#NsQ5 CCݻy5gAN:!E0N8h*mV0!' &wNvQ&Qe:r*Ӂ xB[4dBǏ7{E*(tcóvB%7#xAke&}mΤX, fgF:%J@bEd5<O< a&u0&~O>`jD TH!1ww0RtxM "Wfbm1B8a @5C(BN\EHiӢ~T T:ZibkGu9k:8ԺJ?TtOdUOh'u#@ M]B(][9~8xH>i"Xw}AZUc7S`墷*J,8eXpPpP\„㡬_~E*nh2,SsgNS>R BkJ1J}4)L}~>J?~GJE77yr6jpgLzJޟ[{**9BuBaZBC Cpo Ӝ͋9:-u:58#8Q{F~$D?T.h6--;#i$3ydKp&Hk-L"A҉,{RM/e݃\3,F[鏏W+:$ɠnnb˨Nnb`tTZF7@+V74LȂV cg3'|tR* TkAETgT^CT*]"]*mtBaꢬ2ʢ?/P!D{TObceD؊k9f24E9hAbUN1ꗐL-D "Vw'< PAЂ{m/߽vzqynuB%x'68@q*Dʡ,A`(RSCd]p8F&Ǧ&fwReL`SrWrqVyqqzq'lܜz2ǽW.](mz"dӆ~lH6&\mP[DŽ|oPI~LHk '{f~ӔzGFTRVsӔŽ# oU.#UVwӨ;K6{\e*TDrW7 0_ {8?0g>]]N:9;d>jEOs*j߆߮w|k|Ot sfrF0?x[ogyO@hD->A+Of0CQ+" `EŠ~`B 1|?J}?" 9BL%0EҬ\RĢr>X] q>A DcL\rF=LJН>D\ U v?cXh wNQh*?s;sw2K!'N ɇ跥϶p}vޝݯ.Q5cۅORђNPхwSL[kJxǃr΍Ǽu* 7B1&M-^!zx*ƃT3yf=1 NBT=FtAy@J*C26ӣ;a }lDWO^4c|D&n>Z`Ĩ%^UT%{on/ } (gԕ!q20J^F:߃DS%wSR)}R?(;a'i6?YTI qh+#8;J?6w[m+N3^G^$4|1V\ʥ_y:hNb IbƼ3}wR(aӟb ȓ'b*)EKWdLϼafkf#_^h?ׇ4{Yd<38̹4LRUknSQ&E*7g׷7_;Yv)|n{ݛۋsvI YooY&Wsr"4:A.1m4͜kL P  xjVJUۊ#,q:ѤR :w~̇ :[Ҹv,:WuJ,cFq`h$@j7CQ`F\ - Zg Ǥa.b` '7I9Q ԕ%G"V' RR>)VapR;v( <Τ T3usDm;8aB2 6=`F빷E+hS]x9r?C鐶nm=Pe)osێda8?5{ׁ`A Fو9%$^YĬW 10J4i6ClC7.woR:axݛ<|ܒN#JjӾLhQY @㔊Ngl +aڂyryg!@P Uу(\{jh : 8Jvh"rlB-|VIZB6De lD kGYw@YL;Ӕ RHs:8*5ۡAg|Z#;[,'\*t'l^nK5sQƁ==os3.errGP˱cL;1DO[ BWdH%Ro0@Zף _*%ebZWsknDRqBwK"H"ܻhJ Y.ѝ!ʠhpop#.x&[! 
mz)`f㜫AKmtoIvZ ֝ݬ 4^.4}6ƥO}ێisܻ7Vn2YgZ 'O2 yz28pf?_ ]Di7j2uT} 8ja@I]:=B*P\:V Wm0&&g(׷{l ?UkK51ҥW6P ݟTV60El>dhp9ܠJwqyC:Et#D 8x{}r:P6kJYwWY -_T"-KLO֘b|?i?{Zp~uҫ Ktv8WQRZ4=*zcTZO5>-Ԏ0aLC{YsM/Efn]7_UL`zC}_rr L?4󾼂R I3(7x2Z Dk5d̉sgm?<qI,VX$ȧY nɇMАD)Hgz3"}IG~&-f1ZS|z5!-~ҘJPI2ܸF,(x$,ؼk6{qWD#q'*dW\BYXdjjкr I²@ES܆D$AYLq15 S2A󎫣95} U\ D2HQ.,7?)lmtnBM;z?ydŲ#W׳wp]+z7?Et: f>Nr~rDl+%g6P):paH!2KOL>|TLtgq/I$hkѲ i͘= IAI[A lz'c7z4v%[ v2LV5Η}2]d;~q:}9YF:\F=Zzݼ*<]ک:$;9wV9UsYm8čU}H|ctDioO(:gIS.!0\-4:`ve9*+:L['+;%wU ,{2 #eS,2n͊=qJ`$ZԹ. ˓I9@fZ)H$?`e .øVvw"p'*=r.J:3 {e_04zmV B@U#U>:d .-bs/ۚD~(߻:56r`цG4⎿|7i-%tn^O1UeV5Sr~yz=!JgՕeۇ82?y,Gop#{ cAOQ$xljdTh98M"lM$}=Te8eLE<)VN^~qCͶ5֕ƹ-uE/!,,p ]FG $Sƛ/3/>MX|֟l e/|W.ZeIM_o_AZV=j|]? ⫟/n¥Mt n7Kja k)*EHº`68UYtvȠяN?"Zu={pR7u /*m:#A~ON.ǵۺ?nn]oel̰:3۾4s T6H/vN+-+P*mKTA?A|p&Gl;b3)ZCAhy/<4o"3G9a(]Lʲ%UΨ4%6TqlFFLyC!.-ZZYdwr|0 4p|X=Z5=x)WfB\WDRP[V-,3KT¦麛/0Q-B: " u֚`xy?(<}S} 1 ?kH$ht UD, VSkJjY9.*"Ɠ5u@2-]!G!6HPFq2;0^GVҊ~BHL*]}T;g*c"Ǝ+%Y{vGpiB4?y ޽?jb(xx5-W[+V"aizS"V\ Et@:gp^ HL[egmۀrJ}Ζ<ϳ3ŹdpZ}R:TϥtZ {D ]ʯg,MmD$C  KO ;ȴVt(Gי#f%`(*rc+`kအ$/wK<|J*wzoOrC7?j+L" ы?Q ,&nU{nUe[2ShjeJw.8cL]cK168e#bU* 'U4zn4/yܶCS/F! h9Sܔu=7,Wp"@b~jMe%1'BS~QJA AFNY78Ӡ҈>>RMs?qLX isI]hHL-FOY=BXU3F-wž˪'w@+J$+*J빮rPJ:#H:,}A#!PZWt*…CE~vC䈝oX<|sw[~o}kTl7O>$(9p`2Em L|1J\UŅ-JfD)ckAF\6uecCM΁Br)q F{8ESeVN + cBh:ܐR!UM*u 3J7;N:DMF=4OW@9*WgI"mDTa1ELԃa:x)J&qt|y bpx#'' ds#TQD7/ F^?I#eFGY8 YFO^V6J RH;- R7IZFU@/[TÀ D131K!QB(ePp@ cXLlfZOv$USԕn$!,ҹuG7N rVO14 J*( [*E?Xڮf"ZRj&5}9ރ,kTطƗE+mʹvjuTKJ`$鬵vN֍q 7´o; `+_$xUb JI,nʳo~#? :`u_#v6Rٺ;Y<(*K\ԍwoYTktż%kCNIՔsnx'QTxWۣ֫ Z$Lv/ Njn;N'ֳ,N [ Ց#trr7U H-Yi%h0daC8(&G:rP eI=M9'tP89$G%un]akqxZ }K<۩*Mj}xv;B%Kl[*M+Ş8.]qdZA횒5{ :{ҡ7WyzL{X_(`?Q;O桃d,ELL[Z!*ۂ@=G_J[PB0'qkƼ$`FM/+"" ;w0~o΁w| \*4%K[")l,9ӼTIјn'y(4ȉr!%+ta" $55meƇLZhW1$оIZ%x(}5j5]s}]I EjTUsѕq&x7fp+V7@QPe`>Mɛ7'o>V?kl!XGD 0uju]Y[VVT)S.ϺYחǛ-xjmx=o_~נ͇zv\~Xsw* ͯ-g/}x/OY;esU7kIn*)E$5pm]lkkESh(UQVpNI-]UPf2bYIu{ǿ{6+ _We~}J_VR[l\K\>$ļD$ E`"$[aϻ;rypōT9-r^:'8n>e7tjlQ4̠k+ea ^CP$L X aB$ sQ3ۈom E" XBxsO+\ {Ǔ1vJf!1f}1EEedK*n[3wjM~ !k%ODTtNQ6 * ~x ֚$E`i4er_U|\6MnѺJБ?)zT>?鎧'ϳh6[Kg6EZ 㚉boE\|BT󠔤;o8X-y_?Hg[w7s4?鱵+_ʮkΚ9\SZZn&7&/-uxtA6zu|,"oF17<^#^&b\X2YϢ`5T0h•FO9X7eSm*Zfl!ov䪡A cЂX!=bur.X_|T29ʝнꏌ/NYW>E% h0q]I;,R`r=H =JAi"z6IYq:D? !7symJ8'n ^z8Qc7ľC~7:﷽(=QgyYto͸ش_3b=޻ŌHݵIib\X mj=]#Ԓ~;xrD;nڀ;dxF, W9m#4ǖ;eG?y_+ی `Ht&L֎,#U'ɡ ^jxy]GMtq)h#QeT_U2S'zU XR%SXT(n.Nn}E[ƾz(4z˻yH}Ǯ=ޏ?zp& {FO^};juY 䱔yZtdg[V @Ԓ3 3֜Iǰ xKB*aěV nZ4^@ x @>דJ!R[c2^A%O;F> V+w y&GDm25ЂO~zS ;W|~=e` !uAe ?ZlgOԿǂR]X>FޕVQxMn$ҴF.o(Eͺ]죶yʷ?53uz[?/0 l~WN>H9T=go.Nο.>fW1j1˹HGwMޏV([<ϗ@0^8TTv}yӂȌՆ>w^c"P~7o.s${5O qhȘ`xFhM3Q>tKuږ+`D\ U^S1#hZ"ǣIJ1甀\.gID|qWt&S"B@mKT jYGzpD3u%桱F8Uv'hq*v˒n̟l4*]ڕ q2Gog_2.MLf};si^sU3K.Bb-IY%/CW>xǙkOk^,zfmhH =գP<4)8+q(%=qq#0qY;!W9'a?F@UR'X+q$ c_ѓ5I]UmqZkH)T۠q V/'m ȉ/TM091E's6fp暷$jVrBEl,$(l J pvS0LݪՌoVVxZS|]D"8z,5M5YHA)uJ2}e%;0o//EEû/>iDN&L ydLE7gPCÀM{ɭMgv0:jmjQ ykӠfQ֦fA۹5qz6`zE_hj4D]'AkM;6! 1kRl+=)1]uƮr][o[9+FvC*^d}؝Oxu([R[[mGGGG>U} *7x>(:ʞΕ=]l֒ce~+{lc]UYGw'|'!+%lJiKV[wVPHv]ׯrqĆK W}\eצx9ӓy[ [yKMs ɕb&gw'H1Pw ]ae-{LM|;Lj\SlAm^ׯJ<79֜ wpFo9Ew%I@y ҇ 6Q*u}IN;cl"HB:o%l5VJ%y@(Hc).hXM5R&{5>qNX *]+Đ]KFs5F<&]cBVʁb*cl@pyv+=L?դ.~8qV…{` YxKZnq?#^JW_'~_kjt@wC!^\>7kP{)#aUދ@:1H~ YVsKR{INCQ8Z " TB$V kĈ\B@QpΆ>qS7?YZd {d$<ݞ1ߓ2$9PG[=/qJb~'o]uxZ0:-IIA 2 ZjiMtkV:Ue }xDʛnjxn$4k6?i4:੭7e4N tPY"9B.*ρfD0^x. 
HU "~1KI֥`w wc H) Jx2v7% ݸq5Bk[IgǑSg$VA=PtU Ya<5ex5wXVi+ sH9V8ĭ_;'otu C;F z,5_@"6X+8Х^w^YܐkI[Ӭfw{;E2|*(3d]pp+xe(ObTczxQ[ԎE=G `-`4댻(b[c(ͅ8XmGBd@[Qr/taSF#A/k.hYj.sD<ޕҤX}?/3,ҿIZ:WzZk9ߗʯIⅾq8}tg(Ο_#0~)"]u%W}Z*>[-Yu>,T\qCũ~>nfR[_Pwtn/Rڋ`֭euCũ6,5M}`ݬX֭ Z;X\zBYg>'ks/4w_>:޶:pk0uFQYu6V_>qp}>wtR2mm,Q=^4ٸ{>jn -*{* G[ξAuJ'NY{m??CWMF UE+~d_W'ӫ^=xRMeǣQWĎ_ޝ^<~~{άcSƦ݄,9cSábS ٮ :e 8BRvߝ u3gߛ[:Wc.j *NAD `"f xwd.̓ ڠ~jvI"lQ#Xs(ƷKJ ][|k'32i+ד+׻- nRVok~S?X|Rg^r_lX}٭y|Uo!YK {w/0evfJx :@>6*Q;} EQ[\ۣ踧S;za [4Mw.)z Ktۨ?lɏ,},m:Z%`QGG=JRS KsՖWÃQ۵dGEJRi0Ҷt1j^sdOReѫ^ eɕΪ'D}ol'{qy]Ou7/ *C!Rf_R+ٳx¼7I ze&qM} )P6F(5]5 0Xtzq@]g"x)\۷Gq*ކS2, 3\^gsK;SH~E`ua6lt+U8}`7֒m DMJr Lb d@0[dŤd0$d&gy# ͝>j͍ N[Jad2)nqAR#4Z//ˢgb| E#`Y2M!D @Q\ȳB8}㰠7 GW7d"ؙ'". &Ќ'CT~J"NOOԆ&}FX2 ȁa" ;6BEG<_i" po Pbm7[H+\mQ:C ^r5L ݾJR)'DLQG PjYw1\<~ kRB[?H9YύQB :b`.N籭’Jv48DshH@[z"1#on8+ɃȩE+=s:Y$1v%kB^kw'aLiA_BpEK6H7o~ijḞL}/0)Z,^-ֲEOG_J1EDS *y-LRDO%ttҳq񧫋}:IB{=Kˬ2;!8BkDHgCYx0ތ13e"@2OsQ풐#^K4DpKlULX( ?d!NE)9FΜ7Y(<Ά/NW,jC'V`l1)~mI2QN2 !.}wR !Bf,!MgcNVX$KL8W ؎_5W#7SOӧ샯y}e HZ9E)7BeU*tbXZaU,G ۶5d娷RTHP32vb2}'bAnT\sbBty/ӈ /)j cwy}1d^Գ"6JF`>Pul_MtP'guIzvRg1%-nrP}y=̧Mp}\*HH zJ2PZ(,Qzv]7.=K1RRt}wg+)`KQL6KY2P5GJdLiY{(ɨ} (`ݱѠ[9j_HukY$,:;K q>qUʚ~vfߪ()P&Aǜ0dVP3D{'(q[!:U^C U2A2l# j_ؑ^)qI3[-B }Fp˩nJ;W*Zp{#]iG蟳K5rFě,K-RMm)ds6z?″'$PdV6ECsbl&\mf08Ğ~Rm:5$|JaLD[P7@ C,b)!\ZnH mQV8ݰZqzRtCSJ]tRq B+ݐ:&v5DxCJQ<ť,:q:f,E랸W e&b]J K9斞GlB%PV.e{1qfI녕x= j OԍqyKO:?ͱRvjsuRd"mF>qJT)Jhcbl,\BoCZ+&]sK)/n+IJ9JlPsLbYIbW 9xcmW7&qJiY{1{˶10LoN^)5J Jc[] S(qUJ4JnPsLݲjԞ֛W }X=*ò(*xvr{p0Ͱ^= c%RMJ*BD1e){_fD"zD1ǴۮF,ܲSJ.9lVJs̋ayRH;ڠRK-?%9fv5xRJ@b:|HcY0' JD l߆1VzQ5 Y>SJ)nRAx ^o)es٤xziz5alW^;IaZTX0WݳN:8֒@ ;ӁA2F $ixb$kCT#_݉Հ0(f;P[Bzxؒצ e Y%oywS怂=w7Bow\=•;dHPƊlwm/[Qо =0L`kռw& 2^|-j:X1 CiBB}lf 0uMߊE~Nj ~Xܕ~-f 2 &( C;j&y!fT{k> gT҅K.}8=i,cuZױk@+wO96jIո2ar&he'?#p NZ_}c5-(봖Ew]°0"r_hU}m1ǒxg~ou\3 gO@mvI^~:ݶj&rЄ1֗IwգLzks92HđgV;7jD3ʃZ@{Ez:^43fz6׸'A؅߶]3Zє ETրnw;aGa#ս={ӝeJd_ŀi~b\U땿UwnRNsj`io&*XA.(a@sP>l4}]hvd!j+#,$&W $W$0#z/:|IX_lU4ݏw'^owe?U_qR7 ^S_~j'wxnl)po|3;^)&\LU)EҫW砯<+BxQ{v`?^ȫtr?{}NN7iKxxnWse`?l۵Mu率qo_s o<0͛I,+ >t }΍ XVo6UMo98X @}3H_+iȴiLhXtxUկ^ukvOx8|]7'ݷݽqKknjقV"t@nj.lMvw/mdwݓ0iOGo`=÷{+7Z֑XJi'яA8%[[=̗_IgZvn-[ݳt?`m?lmGvѺG{3/[ɼ;od7f'5'w^E7ӌޛxaV9~l7Go>uUswG@cZ vۭ&?0ꭕ4Fš7gyƒۭ G =֚wOwޛI7.9ݻ$LM`^O.,ϳ7v̂AMsFe4aD?:l5wV8Gquv?9򷍫{>n\Q9Mh1;x/c/[iFt~xv/e#) H"XyTp,E&NZ@HHb.1>>V4#ƓL&,XYy;bJ(g=Vd=o8KjrL3W .g"\ L} eF+J $ X,|ŏ IhHCZSHy%W1ٲN60HW1"I_V˷nC=cYc,R ڔ" G1 ~HRF!T0o"$ 3];Pהeb/q_W̌d{QbQ霋b2LlDΎ8h,@~9hQlB>z/}Dž@Ypf'bO}'EJbD-+G3V>.V9_5 1G # \h$gxxµw_[D[h$wLs 4[e@iƯjޛVkGayGYL Fekp$ʶ D(hFc6L㭗GGT#GIFi^dtz'7l̶k[k=K8 I63q.,TI%^:"ei|[zyNAC/;\*duz%;ULv"`1 rC QoA(֚G!5t3I  E$L.C! |HB%| 3 .DFB"oi,>h$.V:#}Xb9#`c8jXNHYQPdfO'~d7G>G G7Ɖ bZ}hHX{ <͏}Vx}6I%SȆY{p`tS&(#ᰧ:"RHG?|_Z(u5ٻ*NQTo'?X9wokDbw. 
var/home/core/zuul-output/logs/kubelet.log0000644000000000000000004104524615135721764017716 0ustar rootroot
Jan 26 16:54:30 crc systemd[1]: Starting Kubernetes Kubelet...
Jan 26 16:54:30 crc restorecon[4688]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Jan 26 16:54:30 crc restorecon[4688]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 26 16:54:30 crc restorecon[4688]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 26 16:54:30 crc restorecon[4688]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Jan 26 16:54:30 crc restorecon[4688]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Jan 26 16:54:30 crc restorecon[4688]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Jan 26 16:54:30 crc restorecon[4688]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Jan 26 16:54:30 crc restorecon[4688]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Jan 26 16:54:30 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Jan 26 16:54:30 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Jan 26 16:54:30 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Jan 26 16:54:30 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Jan 26 16:54:30 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Jan 26 16:54:30 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Jan 26 16:54:30 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Jan 26 16:54:30 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Jan 26 16:54:30 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Jan 26 16:54:30 crc restorecon[4688]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Jan 26 16:54:30 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 26 16:54:30 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 26 16:54:30 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 26 16:54:30 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 26 16:54:30 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 26 16:54:30 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 26 16:54:30 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 26 16:54:30 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 26 16:54:30 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 26 16:54:30 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 26 16:54:30 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 26 16:54:30 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 26 16:54:30 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 26 16:54:30 crc restorecon[4688]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Jan 26 16:54:30 crc restorecon[4688]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Jan 26 16:54:30 crc restorecon[4688]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Jan 26 16:54:30 crc restorecon[4688]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Jan 26 16:54:30 crc restorecon[4688]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Jan 26 16:54:30 crc restorecon[4688]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Jan 26 16:54:30 crc restorecon[4688]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Jan 26 16:54:30 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 26 16:54:30 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 26 16:54:30 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 26 16:54:30 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 26 16:54:30 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 26 16:54:30 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 26 16:54:30 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Jan 26 16:54:31 crc restorecon[4688]:
/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c15 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc 
restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c2,c18 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c9,c14 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c9,c22 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 
crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Jan 26 16:54:31 crc restorecon[4688]:
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 
16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]:
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 
26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to 
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:31 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 26 16:54:32 crc restorecon[4688]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0
Jan 26 16:54:32 crc restorecon[4688]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0
Jan 26 16:54:33 crc kubenswrapper[4865]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Jan 26 16:54:33 crc kubenswrapper[4865]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version.
Jan 26 16:54:33 crc kubenswrapper[4865]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Jan 26 16:54:33 crc kubenswrapper[4865]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Jan 26 16:54:33 crc kubenswrapper[4865]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Jan 26 16:54:33 crc kubenswrapper[4865]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.824470 4865 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.827780 4865 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.827811 4865 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.827817 4865 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.827822 4865 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.827827 4865 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.827832 4865 feature_gate.go:330] unrecognized feature gate: PinnedImages
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.827837 4865 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.827847 4865 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.827852 4865 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.827856 4865 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.827860 4865 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.827864 4865 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.827868 4865 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.827872 4865 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.827875 4865 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.827879 4865 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.827883 4865 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.827886 4865 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.827890 4865 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.827894 4865 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.827898 4865 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.827901 4865 feature_gate.go:330] unrecognized feature gate: SignatureStores
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.827905 4865 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.827908 4865 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.827912 4865 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.827917 4865 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.827920 4865 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.827924 4865 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.827927 4865 feature_gate.go:330] unrecognized feature gate: Example
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.827931 4865 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.827935 4865 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.827940 4865 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.827944 4865 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.827947 4865 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.827951 4865 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.827955 4865 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.827958 4865 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.827963 4865 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.827966 4865 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.827970 4865 feature_gate.go:330] unrecognized feature gate: OVNObservability
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.827974 4865 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.827978 4865 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.827981 4865 feature_gate.go:330] unrecognized feature gate: NewOLM
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.827985 4865 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.828011 4865 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.828014 4865 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.828018 4865 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.828022 4865 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.828027 4865 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.828031 4865 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.828035 4865 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.828039 4865 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.828043 4865 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.828046 4865 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.828050 4865 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.828054 4865 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.828058 4865 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.828062 4865 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.828066 4865 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.828070 4865 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.828075 4865 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.828079 4865 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.828083 4865 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.828088 4865 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.828092 4865 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.828096 4865 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.828100 4865 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.828103 4865 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.828107 4865 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.828111 4865 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.828116 4865 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828555 4865 flags.go:64] FLAG: --address="0.0.0.0"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828586 4865 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828600 4865 flags.go:64] FLAG: --anonymous-auth="true"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828608 4865 flags.go:64] FLAG: --application-metrics-count-limit="100"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828616 4865 flags.go:64] FLAG: --authentication-token-webhook="false"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828622 4865 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828629 4865 flags.go:64] FLAG: --authorization-mode="AlwaysAllow"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828635 4865 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828640 4865 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828646 4865 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828652 4865 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828658 4865 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828663 4865 flags.go:64] FLAG: --cgroup-driver="cgroupfs"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828668 4865 flags.go:64] FLAG: --cgroup-root=""
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828673 4865 flags.go:64] FLAG: --cgroups-per-qos="true"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828678 4865 flags.go:64] FLAG: --client-ca-file=""
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828682 4865 flags.go:64] FLAG: --cloud-config=""
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828687 4865 flags.go:64] FLAG: --cloud-provider=""
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828693 4865 flags.go:64] FLAG: --cluster-dns="[]"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828699 4865 flags.go:64] FLAG: --cluster-domain=""
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828703 4865 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828708 4865 flags.go:64] FLAG: --config-dir=""
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828740 4865 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828745 4865 flags.go:64] FLAG: --container-log-max-files="5"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828753 4865 flags.go:64] FLAG: --container-log-max-size="10Mi"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828757 4865 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828762 4865 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828767 4865 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828771 4865 flags.go:64] FLAG: --contention-profiling="false"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828775 4865 flags.go:64] FLAG: --cpu-cfs-quota="true"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828779 4865 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828784 4865 flags.go:64] FLAG: --cpu-manager-policy="none"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828788 4865 flags.go:64] FLAG: --cpu-manager-policy-options=""
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828794 4865 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828798 4865 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828803 4865 flags.go:64] FLAG: --enable-debugging-handlers="true"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828807 4865 flags.go:64] FLAG: --enable-load-reader="false"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828812 4865 flags.go:64] FLAG: --enable-server="true"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828816 4865 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828831 4865 flags.go:64] FLAG: --event-burst="100"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828838 4865 flags.go:64] FLAG: --event-qps="50"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828843 4865 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828848 4865 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828857 4865 flags.go:64] FLAG: --eviction-hard=""
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828867 4865 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828873 4865 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828879 4865 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828885 4865 flags.go:64] FLAG: --eviction-soft=""
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828890 4865 flags.go:64] FLAG: --eviction-soft-grace-period=""
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828895 4865 flags.go:64] FLAG: --exit-on-lock-contention="false"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828901 4865 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828905 4865 flags.go:64] FLAG: --experimental-mounter-path=""
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828910 4865 flags.go:64] FLAG: --fail-cgroupv1="false"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828914 4865 flags.go:64] FLAG: --fail-swap-on="true"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828918 4865 flags.go:64] FLAG: --feature-gates=""
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828923 4865 flags.go:64] FLAG: --file-check-frequency="20s"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828927 4865 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828933 4865 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828937 4865 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828941 4865 flags.go:64] FLAG: --healthz-port="10248"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828948 4865 flags.go:64] FLAG: --help="false"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828952 4865 flags.go:64] FLAG: --hostname-override=""
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828956 4865 flags.go:64] FLAG: --housekeeping-interval="10s"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828961 4865 flags.go:64] FLAG: --http-check-frequency="20s"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828967 4865 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828971 4865 flags.go:64] FLAG: --image-credential-provider-config=""
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.828975 4865 flags.go:64] FLAG: --image-gc-high-threshold="85"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829006 4865 flags.go:64] FLAG: --image-gc-low-threshold="80"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829011 4865 flags.go:64] FLAG: --image-service-endpoint=""
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829015 4865 flags.go:64] FLAG: --kernel-memcg-notification="false"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829019 4865 flags.go:64] FLAG: --kube-api-burst="100"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829024 4865 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829029 4865 flags.go:64] FLAG: --kube-api-qps="50"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829035 4865 flags.go:64] FLAG: --kube-reserved=""
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829040 4865 flags.go:64] FLAG: --kube-reserved-cgroup=""
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829045 4865 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829050 4865 flags.go:64] FLAG: --kubelet-cgroups=""
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829055 4865 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829059 4865 flags.go:64] FLAG: --lock-file=""
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829064 4865 flags.go:64] FLAG: --log-cadvisor-usage="false"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829068 4865 flags.go:64] FLAG: --log-flush-frequency="5s"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829073 4865 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829080 4865 flags.go:64] FLAG: --log-json-split-stream="false"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829085 4865 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829089 4865 flags.go:64] FLAG: --log-text-split-stream="false"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829093 4865 flags.go:64] FLAG: --logging-format="text"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829097 4865 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829101 4865 flags.go:64] FLAG: --make-iptables-util-chains="true"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829105 4865 flags.go:64] FLAG: --manifest-url=""
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829109 4865 flags.go:64] FLAG: --manifest-url-header=""
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829115 4865 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829120 4865 flags.go:64] FLAG: --max-open-files="1000000"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829126 4865 flags.go:64] FLAG: --max-pods="110"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829130 4865 flags.go:64] FLAG: --maximum-dead-containers="-1"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829134 4865 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829138 4865 flags.go:64] FLAG: --memory-manager-policy="None"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829143 4865 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829147 4865 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829151 4865 flags.go:64] FLAG: --node-ip="192.168.126.11"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829156 4865 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829170 4865 flags.go:64] FLAG: --node-status-max-images="50"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829175 4865 flags.go:64] FLAG: --node-status-update-frequency="10s"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829180 4865 flags.go:64] FLAG: --oom-score-adj="-999"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829185 4865 flags.go:64] FLAG: --pod-cidr=""
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829191 4865 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829202 4865 flags.go:64] FLAG: --pod-manifest-path=""
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829207 4865 flags.go:64] FLAG: --pod-max-pids="-1"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829212 4865 flags.go:64] FLAG: --pods-per-core="0"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829216 4865 flags.go:64] FLAG: --port="10250"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829220 4865 flags.go:64] FLAG: --protect-kernel-defaults="false"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829224 4865 flags.go:64] FLAG: --provider-id=""
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829228 4865 flags.go:64] FLAG: --qos-reserved=""
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829232 4865 flags.go:64] FLAG: --read-only-port="10255"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829236 4865 flags.go:64] FLAG: --register-node="true"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829241 4865 flags.go:64] FLAG: --register-schedulable="true"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829245 4865 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829253 4865 flags.go:64] FLAG: --registry-burst="10"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829257 4865 flags.go:64] FLAG: --registry-qps="5"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829261 4865 flags.go:64] FLAG: --reserved-cpus=""
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829265 4865 flags.go:64] FLAG: --reserved-memory=""
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829271 4865 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829275 4865 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829280 4865 flags.go:64] FLAG: --rotate-certificates="false"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829284 4865 flags.go:64] FLAG: --rotate-server-certificates="false"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829288 4865 flags.go:64] FLAG: --runonce="false"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829292 4865 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829296 4865 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829301 4865 flags.go:64] FLAG: --seccomp-default="false"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829305 4865 flags.go:64] FLAG: --serialize-image-pulls="true"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829310 4865 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829314 4865 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829319 4865 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829324 4865 flags.go:64] FLAG: --storage-driver-password="root"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829329 4865 flags.go:64] FLAG: --storage-driver-secure="false"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829333 4865 flags.go:64] FLAG: --storage-driver-table="stats"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829338 4865 flags.go:64] FLAG: --storage-driver-user="root"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829343 4865 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829347 4865 flags.go:64] FLAG: --sync-frequency="1m0s"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829352 4865 flags.go:64] FLAG: --system-cgroups=""
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829356 4865 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829363 4865 flags.go:64] FLAG: --system-reserved-cgroup=""
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829367 4865 flags.go:64] FLAG: --tls-cert-file=""
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829371 4865 flags.go:64] FLAG: --tls-cipher-suites="[]"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829376 4865 flags.go:64] FLAG: --tls-min-version=""
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829380 4865 flags.go:64] FLAG: --tls-private-key-file=""
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829385 4865 flags.go:64] FLAG: --topology-manager-policy="none"
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829389 4865 flags.go:64] FLAG: --topology-manager-policy-options=""
Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829393 4865 flags.go:64] FLAG: --topology-manager-scope="container"
FLAG: --v="2" Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829404 4865 flags.go:64] FLAG: --version="false" Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829416 4865 flags.go:64] FLAG: --vmodule="" Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829422 4865 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.829427 4865 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829538 4865 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829544 4865 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829549 4865 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829555 4865 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829560 4865 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829564 4865 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829568 4865 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829573 4865 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829578 4865 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829582 4865 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829586 4865 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829590 4865 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829594 4865 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829598 4865 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829602 4865 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829614 4865 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829618 4865 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829621 4865 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829625 4865 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829629 4865 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829633 4865 feature_gate.go:330] unrecognized feature 
gate: HardwareSpeed Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829636 4865 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829640 4865 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829643 4865 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829646 4865 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829650 4865 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829653 4865 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829657 4865 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829660 4865 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829664 4865 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829667 4865 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829671 4865 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829674 4865 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829677 4865 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829681 4865 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829684 4865 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829688 4865 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829691 4865 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829694 4865 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829699 4865 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829709 4865 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829712 4865 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829716 4865 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829719 4865 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829723 4865 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829726 4865 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829729 4865 feature_gate.go:330] unrecognized feature gate: Example Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829735 4865 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829738 4865 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829742 4865 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829745 4865 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829749 4865 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829753 4865 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829756 4865 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829759 4865 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829763 4865 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829767 4865 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829770 4865 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829774 4865 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829777 4865 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829780 4865 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829784 4865 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829787 4865 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829791 4865 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829794 4865 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829798 4865 feature_gate.go:330] unrecognized feature gate: 
VSphereMultiVCenters Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829801 4865 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829805 4865 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829808 4865 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829812 4865 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.829815 4865 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.830029 4865 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.844395 4865 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.844454 4865 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.844553 4865 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.844561 4865 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.844565 4865 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.844573 4865 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.844578 4865 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.844582 4865 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.844587 4865 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.844591 4865 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.844596 4865 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.844601 4865 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.844607 4865 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.844612 4865 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.844618 4865 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.844625 4865 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.844630 4865 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.844634 4865 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.844638 4865 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.844642 4865 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.844647 4865 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.844652 4865 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.844656 4865 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.844660 4865 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.844666 4865 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.844851 4865 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.844863 4865 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.844870 4865 feature_gate.go:330] unrecognized feature gate: Example Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.844877 4865 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.844884 4865 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.844889 4865 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.844895 4865 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.844901 4865 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.844906 4865 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.844911 4865 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.844918 4865 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.844931 4865 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847323 4865 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847364 4865 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847370 4865 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 
16:54:33.847377 4865 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847383 4865 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847402 4865 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847409 4865 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847414 4865 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847418 4865 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847424 4865 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847429 4865 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847433 4865 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847438 4865 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847447 4865 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847452 4865 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847457 4865 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847460 4865 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847465 4865 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847468 4865 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847472 4865 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847478 4865 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847482 4865 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847486 4865 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847490 4865 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847494 4865 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847501 4865 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847505 4865 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847509 4865 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847513 4865 feature_gate.go:330] unrecognized feature gate: 
NetworkLiveMigration Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847517 4865 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847524 4865 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847531 4865 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847537 4865 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847543 4865 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847550 4865 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847556 4865 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.847572 4865 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847834 4865 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847843 4865 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847848 4865 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847853 4865 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847858 4865 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847864 4865 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847869 4865 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847874 4865 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847879 4865 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847883 4865 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847891 4865 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847895 4865 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847900 4865 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847905 4865 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 26 16:54:33 crc 
kubenswrapper[4865]: W0126 16:54:33.847917 4865 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847923 4865 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847930 4865 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847943 4865 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847952 4865 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847956 4865 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847961 4865 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847966 4865 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847974 4865 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847978 4865 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847982 4865 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.847987 4865 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.848005 4865 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.848011 4865 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.848016 4865 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.848021 4865 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.848026 4865 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.848030 4865 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.848035 4865 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.848039 4865 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.848046 4865 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.848051 4865 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.848056 4865 feature_gate.go:330] unrecognized feature gate: Example Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.848059 4865 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.848063 4865 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.848067 4865 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.848071 4865 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.848075 4865 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.848081 4865 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.848093 4865 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.848097 4865 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.848102 4865 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.848109 4865 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.848113 4865 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.848118 4865 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.848129 4865 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.848135 4865 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.848140 4865 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.848145 4865 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.848150 4865 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.848158 4865 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.848165 4865 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.848170 4865 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.848175 4865 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.848182 4865 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.848188 4865 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.848192 4865 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.848196 4865 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.848200 4865 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.848204 4865 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.848208 4865 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.848212 4865 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.848238 4865 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.848243 4865 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.848247 4865 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 26 
16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.848251 4865 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 26 16:54:33 crc kubenswrapper[4865]: W0126 16:54:33.848255 4865 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.848262 4865 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.848742 4865 server.go:940] "Client rotation is on, will bootstrap in background" Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.852013 4865 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.852097 4865 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.852738 4865 server.go:997] "Starting client certificate rotation" Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.852763 4865 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.853063 4865 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-11-15 22:40:03.898261584 +0000 UTC Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.853196 4865 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.884777 4865 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.886402 4865 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Jan 26 16:54:33 crc kubenswrapper[4865]: E0126 16:54:33.886821 4865 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.224:6443: connect: connection refused" logger="UnhandledError" Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.893795 4865 log.go:25] "Validated CRI v1 runtime API" Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.967391 4865 log.go:25] "Validated CRI v1 image API" Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.969700 4865 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.989578 4865 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2026-01-26-16-50-25-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Jan 26 16:54:33 crc kubenswrapper[4865]: I0126 16:54:33.989619 4865 fs.go:134] Filesystem 
partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}] Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.098464 4865 manager.go:217] Machine: {Timestamp:2026-01-26 16:54:34.00798109 +0000 UTC m=+1.591866717 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654128640 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:35d9e2a2-68c2-48cb-856f-00ba3eb74617 BootID:4ac1a731-046d-4d8f-8161-6e9e491f5dac Filesystems:[{Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:d4:4e:e6 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:d4:4e:e6 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:b1:fd:eb Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:75:8b:97 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:f4:94:b5 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:ea:d9:29 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:fe:69:e4:57:58:39 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:1a:27:db:8f:ab:e3 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654128640 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] 
UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.098752 4865 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. 
Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.098928 4865 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.099877 4865 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.100532 4865 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.100576 4865 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.100836 4865 topology_manager.go:138] "Creating topology manager with none policy" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.100856 4865 container_manager_linux.go:303] "Creating device plugin manager" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.101117 4865 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.101159 4865 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.101473 4865 state_mem.go:36] "Initialized new in-memory state store" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.101589 4865 server.go:1245] "Using root directory" path="/var/lib/kubelet" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.103266 4865 kubelet.go:418] "Attempting to sync node with API server" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.103292 4865 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" 
Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.103322 4865 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.103339 4865 kubelet.go:324] "Adding apiserver pod source" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.103359 4865 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Jan 26 16:54:34 crc kubenswrapper[4865]: W0126 16:54:34.111569 4865 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused Jan 26 16:54:34 crc kubenswrapper[4865]: E0126 16:54:34.111647 4865 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.224:6443: connect: connection refused" logger="UnhandledError" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.114103 4865 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.114488 4865 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.115638 4865 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Jan 26 16:54:34 crc kubenswrapper[4865]: W0126 16:54:34.117056 4865 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused Jan 26 16:54:34 crc kubenswrapper[4865]: E0126 16:54:34.117184 4865 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.224:6443: connect: connection refused" logger="UnhandledError" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.118439 4865 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.118476 4865 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.118488 4865 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.118499 4865 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.118518 4865 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.118528 4865 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.118538 4865 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.118553 4865 plugins.go:603] "Loaded volume plugin" 
pluginName="kubernetes.io/downward-api" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.118564 4865 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.118574 4865 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.118587 4865 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.118596 4865 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.118629 4865 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.119159 4865 server.go:1280] "Started kubelet" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.119601 4865 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.119563 4865 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.119640 4865 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.120643 4865 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Jan 26 16:54:34 crc systemd[1]: Started Kubernetes Kubelet. Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.121757 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.121799 4865 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.122100 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-30 17:30:07.075370875 +0000 UTC Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.122402 4865 volume_manager.go:287] "The desired_state_of_world populator starts" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.122422 4865 volume_manager.go:289] "Starting Kubelet Volume Manager" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.123051 4865 server.go:460] "Adding debug handlers to kubelet server" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.123487 4865 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Jan 26 16:54:34 crc kubenswrapper[4865]: W0126 16:54:34.124044 4865 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused Jan 26 16:54:34 crc kubenswrapper[4865]: E0126 16:54:34.124663 4865 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.224:6443: connect: connection refused" logger="UnhandledError" Jan 26 16:54:34 crc kubenswrapper[4865]: E0126 16:54:34.124914 4865 
kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.125038 4865 factory.go:55] Registering systemd factory Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.125068 4865 factory.go:221] Registration of the systemd container factory successfully Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.125338 4865 factory.go:153] Registering CRI-O factory Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.125356 4865 factory.go:221] Registration of the crio container factory successfully Jan 26 16:54:34 crc kubenswrapper[4865]: E0126 16:54:34.122955 4865 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" interval="200ms" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.125419 4865 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.125482 4865 factory.go:103] Registering Raw factory Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.125502 4865 manager.go:1196] Started watching for new ooms in manager Jan 26 16:54:34 crc kubenswrapper[4865]: E0126 16:54:34.125188 4865 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.224:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.188e56317af01ab2 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-26 16:54:34.119125682 +0000 UTC m=+1.703011269,LastTimestamp:2026-01-26 16:54:34.119125682 +0000 UTC m=+1.703011269,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.127499 4865 manager.go:319] Starting recovery of all containers Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.148230 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.148358 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.148389 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.148415 4865 reconstruct.go:130] "Volume is marked as uncertain and 
added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.148438 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.148461 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.148486 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.148511 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.148543 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.148610 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.148638 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.148661 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.148686 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.148715 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.148753 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.148777 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.148807 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.148837 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.148862 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.148911 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.148935 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.148960 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.149022 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.149046 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.149069 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.149092 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.149118 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.149146 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.149168 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.149189 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.149250 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.149278 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.149304 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.149331 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.149357 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.149381 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.149403 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.149428 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.149464 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.149490 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.149515 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.149540 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.149567 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.149597 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.149623 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.149647 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.149669 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.149689 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.149707 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.149726 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.149745 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.149763 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.149789 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.149840 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.149864 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.149883 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.149904 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.149923 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.149940 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.149959 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.149977 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.150043 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.150063 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.150082 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.150102 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.150120 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.150138 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.150157 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.150175 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.150192 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual 
state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.150212 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.150230 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.150248 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.150266 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.150284 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.150303 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.150321 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.150341 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.150362 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.150382 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.150402 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.150422 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.150442 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.150461 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.150481 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.150500 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.150520 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.150541 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.150559 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.150578 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.150597 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.150616 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.150634 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.150654 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.150674 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.150694 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.150716 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.150796 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.150817 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.150837 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.150861 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.150882 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.150903 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" 
volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.150924 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.150960 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.150982 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.151030 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.151051 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.151076 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.151098 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.151119 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.151143 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.151163 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.151188 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" 
volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.151212 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.151232 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.151253 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.151271 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.151292 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.151315 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.151334 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.151355 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.151375 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.151395 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.151415 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" 
volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.151434 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.151644 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.151663 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.151723 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.151744 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.151765 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.151784 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.151804 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.151824 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.151844 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.151863 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" 
volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.151883 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.151903 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.151922 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.151941 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.151961 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.151982 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152025 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152045 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152064 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152082 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152101 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152122 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152140 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152159 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152179 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152199 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152221 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152239 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152259 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152278 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152297 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152315 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" 
volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152335 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152355 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152374 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152393 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152412 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152432 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152450 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152469 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152489 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152507 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152553 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" 
volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152579 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152597 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152617 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152635 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152656 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152682 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152713 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152736 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152761 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152784 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152804 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" 
volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152822 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152841 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152862 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152881 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152898 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152919 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152937 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152955 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.152973 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.153021 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.153043 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.153063 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.153084 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.153951 4865 manager.go:324] Recovery completed Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.154342 4865 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.154386 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.154410 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.154431 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.154452 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.154478 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.154503 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.154529 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" 
seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.154555 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.154581 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.154608 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.154636 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.154664 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.154691 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.154728 4865 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.154747 4865 reconstruct.go:97] "Volume reconstruction finished" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.154762 4865 reconciler.go:26] "Reconciler: start to sync state" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.171703 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.173179 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.173222 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.173257 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.174147 4865 cpu_manager.go:225] "Starting CPU manager" policy="none" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.174164 4865 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.174182 4865 state_mem.go:36] "Initialized new in-memory state 
store" Jan 26 16:54:34 crc kubenswrapper[4865]: E0126 16:54:34.225861 4865 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Jan 26 16:54:34 crc kubenswrapper[4865]: E0126 16:54:34.326476 4865 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Jan 26 16:54:34 crc kubenswrapper[4865]: E0126 16:54:34.326792 4865 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" interval="400ms" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.354667 4865 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.356477 4865 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv6" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.356527 4865 status_manager.go:217] "Starting to sync pod status with apiserver" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.356557 4865 kubelet.go:2335] "Starting kubelet main sync loop" Jan 26 16:54:34 crc kubenswrapper[4865]: E0126 16:54:34.356608 4865 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Jan 26 16:54:34 crc kubenswrapper[4865]: W0126 16:54:34.357427 4865 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused Jan 26 16:54:34 crc kubenswrapper[4865]: E0126 16:54:34.357597 4865 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.224:6443: connect: connection refused" logger="UnhandledError" Jan 26 16:54:34 crc kubenswrapper[4865]: E0126 16:54:34.427493 4865 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Jan 26 16:54:34 crc kubenswrapper[4865]: E0126 16:54:34.457691 4865 kubelet.go:2359] "Skipping pod synchronization" err="container runtime status check may not have completed yet" Jan 26 16:54:34 crc kubenswrapper[4865]: E0126 16:54:34.528187 4865 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.601047 4865 policy_none.go:49] "None policy: Start" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.602725 4865 memory_manager.go:170] "Starting memorymanager" policy="None" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.602755 4865 state_mem.go:35] "Initializing new in-memory state store" Jan 26 16:54:34 crc kubenswrapper[4865]: E0126 16:54:34.628353 4865 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Jan 26 16:54:34 crc kubenswrapper[4865]: E0126 16:54:34.658968 4865 kubelet.go:2359] "Skipping pod synchronization" err="container runtime status check may not have completed yet" Jan 26 16:54:34 crc kubenswrapper[4865]: E0126 16:54:34.727867 4865 controller.go:145] "Failed to ensure lease exists, will retry" err="Get 
\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" interval="800ms" Jan 26 16:54:34 crc kubenswrapper[4865]: E0126 16:54:34.728937 4865 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.756721 4865 manager.go:334] "Starting Device Plugin manager" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.756800 4865 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.760943 4865 server.go:79] "Starting device plugin registration server" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.762390 4865 eviction_manager.go:189] "Eviction manager: starting control loop" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.762435 4865 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.762818 4865 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.763062 4865 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.763094 4865 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Jan 26 16:54:34 crc kubenswrapper[4865]: E0126 16:54:34.773929 4865 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.863147 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.865411 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.865483 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.865506 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:54:34 crc kubenswrapper[4865]: I0126 16:54:34.865554 4865 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 26 16:54:34 crc kubenswrapper[4865]: E0126 16:54:34.866376 4865 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.224:6443: connect: connection refused" node="crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.059722 4865 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.059945 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.062146 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:54:35 crc 
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.062188 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.062200 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.062335 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.062945 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc"
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.063111 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.063443 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.063481 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.063492 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.063705 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.063841 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.063891 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.064748 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.064789 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.064757 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.064827 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.064845 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.064804 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.064885 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.064925 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.064953 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.065169 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.065324 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.065361 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.066148 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.066164 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.066173 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.066219 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.066246 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.066258 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.066393 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.066506 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.066528 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.066619 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.067208 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.067238 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.067257 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.067835 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.067867 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.067882 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.067904 4865 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.068399 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.068418 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:54:35 crc kubenswrapper[4865]: E0126 16:54:35.068411 4865 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.224:6443: connect: connection refused" node="crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.068429 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.068585 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.068614 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.069465 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.069483 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.069492 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.121010 4865 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.122398 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-20 23:47:00.511035814 +0000 UTC Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.167207 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.167252 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.167278 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.167300 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.167363 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.167404 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.167437 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.167458 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.167485 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.167546 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.167603 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.167632 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.167654 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.167676 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: 
\"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.167709 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.268940 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.269061 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.269087 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.269109 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.269137 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.269162 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.269183 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.269183 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 
26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.269206 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.269229 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.269250 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.269261 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.269270 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.269290 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.269295 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.269310 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.269327 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.269323 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: 
I0126 16:54:35.269369 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.269376 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.269331 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.269443 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.269413 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.269331 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.269481 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.269504 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.269529 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.269542 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: 
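[Editor's note] The interleaved "operationExecutor.MountVolume started" / "MountVolume.SetUp succeeded" pairs above are the volume manager's reconcile loop diffing desired mounts against the actual state and kicking off one setup operation per missing volume. A minimal inline sketch of that shape (not kubelet source, which runs the operations asynchronously):

```go
// Sketch of a desired-vs-actual mount reconcile pass.
package main

import "fmt"

func main() {
	desired := []string{"etc-kube", "cert-dir", "data-dir"}
	actual := map[string]bool{} // volumes already in the actual state of world

	for _, vol := range desired {
		if actual[vol] {
			continue // nothing to do; already mounted
		}
		fmt.Printf("operationExecutor.MountVolume started for volume %q\n", vol)
		// The real kubelet dispatches this to an operation executor;
		// here the setup happens inline for clarity.
		actual[vol] = true
		fmt.Printf("MountVolume.SetUp succeeded for volume %q\n", vol)
	}
}
```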
\"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.269587 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.269620 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: W0126 16:54:35.274917 4865 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused Jan 26 16:54:35 crc kubenswrapper[4865]: E0126 16:54:35.275063 4865 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.224:6443: connect: connection refused" logger="UnhandledError" Jan 26 16:54:35 crc kubenswrapper[4865]: W0126 16:54:35.393601 4865 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused Jan 26 16:54:35 crc kubenswrapper[4865]: E0126 16:54:35.393686 4865 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.224:6443: connect: connection refused" logger="UnhandledError" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.413279 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.423468 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.448946 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.460142 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.466110 4865 util.go:30] "No sandbox for pod can be found. 
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.469343 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.482795 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.482825 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.482836 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.482861 4865 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Jan 26 16:54:35 crc kubenswrapper[4865]: E0126 16:54:35.483286 4865 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.224:6443: connect: connection refused" node="crc"
Jan 26 16:54:35 crc kubenswrapper[4865]: E0126 16:54:35.529375 4865 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" interval="1.6s"
Jan 26 16:54:35 crc kubenswrapper[4865]: W0126 16:54:35.557483 4865 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused
Jan 26 16:54:35 crc kubenswrapper[4865]: E0126 16:54:35.557640 4865 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.224:6443: connect: connection refused" logger="UnhandledError"
Jan 26 16:54:35 crc kubenswrapper[4865]: W0126 16:54:35.568536 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-7e34687af56b27361768df29d61a18766be48e03e8513a9072851c6e333a9710 WatchSource:0}: Error finding container 7e34687af56b27361768df29d61a18766be48e03e8513a9072851c6e333a9710: Status 404 returned error can't find the container with id 7e34687af56b27361768df29d61a18766be48e03e8513a9072851c6e333a9710
Jan 26 16:54:35 crc kubenswrapper[4865]: W0126 16:54:35.571343 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-c7e85e9d93693e73aadb7552f3569633de9cacad555a210e07c127e7139b9d19 WatchSource:0}: Error finding container c7e85e9d93693e73aadb7552f3569633de9cacad555a210e07c127e7139b9d19: Status 404 returned error can't find the container with id c7e85e9d93693e73aadb7552f3569633de9cacad555a210e07c127e7139b9d19
Jan 26 16:54:35 crc kubenswrapper[4865]: W0126 16:54:35.575929 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-10b83f3564ae466b7c533af79c4d5d3a1076e8c712e52ce5628fd218322bffcf WatchSource:0}: Error finding container 10b83f3564ae466b7c533af79c4d5d3a1076e8c712e52ce5628fd218322bffcf: Status 404 returned error can't find the container with id 10b83f3564ae466b7c533af79c4d5d3a1076e8c712e52ce5628fd218322bffcf
Jan 26 16:54:35 crc kubenswrapper[4865]: W0126 16:54:35.577554 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-950dca6989842d4c78d16fcd49365b4bdb01d50cdfa2937d3d6dbdbaa3bbe372 WatchSource:0}: Error finding container 950dca6989842d4c78d16fcd49365b4bdb01d50cdfa2937d3d6dbdbaa3bbe372: Status 404 returned error can't find the container with id 950dca6989842d4c78d16fcd49365b4bdb01d50cdfa2937d3d6dbdbaa3bbe372
Jan 26 16:54:35 crc kubenswrapper[4865]: W0126 16:54:35.581945 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-1b7a30e33d355363d11a1a40b609124abfc17bef40432d620f8db47ec471efcf WatchSource:0}: Error finding container 1b7a30e33d355363d11a1a40b609124abfc17bef40432d620f8db47ec471efcf: Status 404 returned error can't find the container with id 1b7a30e33d355363d11a1a40b609124abfc17bef40432d620f8db47ec471efcf
Jan 26 16:54:35 crc kubenswrapper[4865]: W0126 16:54:35.608198 4865 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused
Jan 26 16:54:35 crc kubenswrapper[4865]: E0126 16:54:35.608359 4865 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.224:6443: connect: connection refused" logger="UnhandledError"
Jan 26 16:54:35 crc kubenswrapper[4865]: I0126 16:54:35.972736 4865 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates
Jan 26 16:54:35 crc kubenswrapper[4865]: E0126 16:54:35.975006 4865 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.224:6443: connect: connection refused" logger="UnhandledError"
Jan 26 16:54:36 crc kubenswrapper[4865]: I0126 16:54:36.120781 4865 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused
Jan 26 16:54:36 crc kubenswrapper[4865]: I0126 16:54:36.122836 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 05:19:55.119460739 +0000 UTC
Jan 26 16:54:36 crc kubenswrapper[4865]: I0126 16:54:36.284117 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
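[Editor's note] A detail worth noticing in the certificate_manager.go:356 lines scattered through this log: the kubelet-serving certificate's expiration never changes (2026-02-24 05:53:03 UTC), yet every line reports a different rotation deadline. The manager re-derives the deadline each time as a jittered point in the tail of the certificate's lifetime. A sketch of that idea (not certificate_manager source; the 70%-90% window and 90-day lifetime are assumptions for the example):

```go
// Sketch: pick a randomized rotation deadline inside a certificate's lifetime.
package main

import (
	"fmt"
	"math/rand"
	"time"
)

func rotationDeadline(notBefore, notAfter time.Time) time.Time {
	lifetime := notAfter.Sub(notBefore)
	// Uniformly random point between 70% and 90% of the lifetime (assumed window).
	frac := 0.7 + 0.2*rand.Float64()
	return notBefore.Add(time.Duration(frac * float64(lifetime)))
}

func main() {
	notAfter, _ := time.Parse(time.RFC3339, "2026-02-24T05:53:03Z")
	notBefore := notAfter.AddDate(0, 0, -90) // assumed 90-day lifetime
	for i := 0; i < 3; i++ {
		// Each call yields a different deadline, like the log lines above.
		fmt.Println("rotation deadline is", rotationDeadline(notBefore, notAfter))
	}
}
```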
Jan 26 16:54:36 crc kubenswrapper[4865]: I0126 16:54:36.285767 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:54:36 crc kubenswrapper[4865]: I0126 16:54:36.285802 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:54:36 crc kubenswrapper[4865]: I0126 16:54:36.285813 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:54:36 crc kubenswrapper[4865]: I0126 16:54:36.285842 4865 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Jan 26 16:54:36 crc kubenswrapper[4865]: E0126 16:54:36.286360 4865 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.224:6443: connect: connection refused" node="crc"
Jan 26 16:54:36 crc kubenswrapper[4865]: I0126 16:54:36.372765 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"c7e85e9d93693e73aadb7552f3569633de9cacad555a210e07c127e7139b9d19"}
Jan 26 16:54:36 crc kubenswrapper[4865]: I0126 16:54:36.375768 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"7e34687af56b27361768df29d61a18766be48e03e8513a9072851c6e333a9710"}
Jan 26 16:54:36 crc kubenswrapper[4865]: I0126 16:54:36.376678 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"950dca6989842d4c78d16fcd49365b4bdb01d50cdfa2937d3d6dbdbaa3bbe372"}
Jan 26 16:54:36 crc kubenswrapper[4865]: I0126 16:54:36.377926 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"1b7a30e33d355363d11a1a40b609124abfc17bef40432d620f8db47ec471efcf"}
Jan 26 16:54:36 crc kubenswrapper[4865]: I0126 16:54:36.378775 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"10b83f3564ae466b7c533af79c4d5d3a1076e8c712e52ce5628fd218322bffcf"}
Jan 26 16:54:37 crc kubenswrapper[4865]: I0126 16:54:37.120435 4865 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused
Jan 26 16:54:37 crc kubenswrapper[4865]: I0126 16:54:37.123392 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-31 03:45:44.646939114 +0000 UTC
Jan 26 16:54:37 crc kubenswrapper[4865]: E0126 16:54:37.130335 4865 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" interval="3.2s"
Jan 26 16:54:37 crc kubenswrapper[4865]: W0126 16:54:37.303385 4865 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused
Jan 26 16:54:37 crc kubenswrapper[4865]: E0126 16:54:37.303455 4865 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.224:6443: connect: connection refused" logger="UnhandledError"
Jan 26 16:54:37 crc kubenswrapper[4865]: I0126 16:54:37.886636 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 26 16:54:37 crc kubenswrapper[4865]: I0126 16:54:37.889324 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:54:37 crc kubenswrapper[4865]: I0126 16:54:37.889783 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:54:37 crc kubenswrapper[4865]: I0126 16:54:37.890091 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:54:37 crc kubenswrapper[4865]: I0126 16:54:37.890192 4865 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Jan 26 16:54:37 crc kubenswrapper[4865]: E0126 16:54:37.891292 4865 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.224:6443: connect: connection refused" node="crc"
Jan 26 16:54:38 crc kubenswrapper[4865]: W0126 16:54:38.099492 4865 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused
Jan 26 16:54:38 crc kubenswrapper[4865]: E0126 16:54:38.100388 4865 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.224:6443: connect: connection refused" logger="UnhandledError"
Jan 26 16:54:38 crc kubenswrapper[4865]: W0126 16:54:38.100753 4865 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused
Jan 26 16:54:38 crc kubenswrapper[4865]: E0126 16:54:38.100921 4865 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.224:6443: connect: connection refused" logger="UnhandledError"
Jan 26 16:54:38 crc kubenswrapper[4865]: I0126 16:54:38.121092 4865 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused
Jan 26 16:54:38 crc kubenswrapper[4865]: I0126 16:54:38.124249 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-28 13:41:25.606266275 +0000 UTC
Jan 26 16:54:38 crc kubenswrapper[4865]: I0126 16:54:38.588746 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"5d2ec0d2b95278257555fc63611b54fc6c16f5abbb660c6dcd07c256495f67cf"}
Jan 26 16:54:38 crc kubenswrapper[4865]: I0126 16:54:38.590541 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"00c85dd79fa2ffbae8e6a95d729ed74719c871cdfb91f69a761ae95cec2055b3"}
Jan 26 16:54:38 crc kubenswrapper[4865]: I0126 16:54:38.590594 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 26 16:54:38 crc kubenswrapper[4865]: I0126 16:54:38.592420 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"62a0ad6430e07209cecff40860c0e36cb297f37d4f0230f89890b637009538b7"}
Jan 26 16:54:38 crc kubenswrapper[4865]: I0126 16:54:38.592487 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:54:38 crc kubenswrapper[4865]: I0126 16:54:38.592514 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:54:38 crc kubenswrapper[4865]: I0126 16:54:38.592518 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 26 16:54:38 crc kubenswrapper[4865]: I0126 16:54:38.592526 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:54:38 crc kubenswrapper[4865]: I0126 16:54:38.593251 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:54:38 crc kubenswrapper[4865]: I0126 16:54:38.593279 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:54:38 crc kubenswrapper[4865]: I0126 16:54:38.593288 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:54:38 crc kubenswrapper[4865]: I0126 16:54:38.593810 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90"}
Jan 26 16:54:38 crc kubenswrapper[4865]: I0126 16:54:38.595561 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363"}
Jan 26 16:54:38 crc kubenswrapper[4865]: I0126 16:54:38.595618 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 26 16:54:38 crc kubenswrapper[4865]: I0126 16:54:38.596192 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:54:38 crc kubenswrapper[4865]: I0126 16:54:38.596220 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:54:38 crc kubenswrapper[4865]: I0126 16:54:38.596228 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:54:38 crc kubenswrapper[4865]: W0126 16:54:38.668521 4865 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused
Jan 26 16:54:38 crc kubenswrapper[4865]: E0126 16:54:38.668778 4865 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.224:6443: connect: connection refused" logger="UnhandledError"
Jan 26 16:54:39 crc kubenswrapper[4865]: I0126 16:54:39.121852 4865 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused
Jan 26 16:54:39 crc kubenswrapper[4865]: I0126 16:54:39.124460 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 09:59:59.968827662 +0000 UTC
Jan 26 16:54:39 crc kubenswrapper[4865]: I0126 16:54:39.603370 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"1a953b0ed4474ae87ed2f4658deb39951e319c1675872e49ea28b634d3c37b8b"}
Jan 26 16:54:39 crc kubenswrapper[4865]: I0126 16:54:39.603424 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"4c1c8845e13a8d4b226f13897c388c5e07b2e9442c182930b42a88b2ed7b299b"}
Jan 26 16:54:39 crc kubenswrapper[4865]: I0126 16:54:39.605624 4865 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363" exitCode=0
Jan 26 16:54:39 crc kubenswrapper[4865]: I0126 16:54:39.605679 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363"}
Jan 26 16:54:39 crc kubenswrapper[4865]: I0126 16:54:39.605782 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Jan 26 16:54:39 crc kubenswrapper[4865]: I0126 16:54:39.607938 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:54:39 crc kubenswrapper[4865]: I0126 16:54:39.608003 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:54:39 crc kubenswrapper[4865]: I0126 16:54:39.608020 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:54:39 crc kubenswrapper[4865]: I0126 16:54:39.609337 4865 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="5d2ec0d2b95278257555fc63611b54fc6c16f5abbb660c6dcd07c256495f67cf" exitCode=0
podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="5d2ec0d2b95278257555fc63611b54fc6c16f5abbb660c6dcd07c256495f67cf" exitCode=0 Jan 26 16:54:39 crc kubenswrapper[4865]: I0126 16:54:39.609421 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"5d2ec0d2b95278257555fc63611b54fc6c16f5abbb660c6dcd07c256495f67cf"} Jan 26 16:54:39 crc kubenswrapper[4865]: I0126 16:54:39.609498 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 16:54:39 crc kubenswrapper[4865]: I0126 16:54:39.611816 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:54:39 crc kubenswrapper[4865]: I0126 16:54:39.611876 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:54:39 crc kubenswrapper[4865]: I0126 16:54:39.611898 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:54:39 crc kubenswrapper[4865]: I0126 16:54:39.612575 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 16:54:39 crc kubenswrapper[4865]: I0126 16:54:39.614245 4865 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="00c85dd79fa2ffbae8e6a95d729ed74719c871cdfb91f69a761ae95cec2055b3" exitCode=0 Jan 26 16:54:39 crc kubenswrapper[4865]: I0126 16:54:39.614352 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 16:54:39 crc kubenswrapper[4865]: I0126 16:54:39.614312 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"00c85dd79fa2ffbae8e6a95d729ed74719c871cdfb91f69a761ae95cec2055b3"} Jan 26 16:54:39 crc kubenswrapper[4865]: I0126 16:54:39.614980 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:54:39 crc kubenswrapper[4865]: I0126 16:54:39.615068 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:54:39 crc kubenswrapper[4865]: I0126 16:54:39.615092 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:54:39 crc kubenswrapper[4865]: I0126 16:54:39.616758 4865 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="62a0ad6430e07209cecff40860c0e36cb297f37d4f0230f89890b637009538b7" exitCode=0 Jan 26 16:54:39 crc kubenswrapper[4865]: I0126 16:54:39.616807 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"62a0ad6430e07209cecff40860c0e36cb297f37d4f0230f89890b637009538b7"} Jan 26 16:54:39 crc kubenswrapper[4865]: I0126 16:54:39.616969 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 16:54:39 crc kubenswrapper[4865]: I0126 16:54:39.617322 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:54:39 crc kubenswrapper[4865]: I0126 16:54:39.617364 4865 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:54:39 crc kubenswrapper[4865]: I0126 16:54:39.617377 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:54:39 crc kubenswrapper[4865]: I0126 16:54:39.618244 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:54:39 crc kubenswrapper[4865]: I0126 16:54:39.618277 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:54:39 crc kubenswrapper[4865]: I0126 16:54:39.618290 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:54:40 crc kubenswrapper[4865]: I0126 16:54:40.120720 4865 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused Jan 26 16:54:40 crc kubenswrapper[4865]: I0126 16:54:40.124970 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-13 02:19:55.737888887 +0000 UTC Jan 26 16:54:40 crc kubenswrapper[4865]: I0126 16:54:40.244774 4865 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Jan 26 16:54:40 crc kubenswrapper[4865]: E0126 16:54:40.331847 4865 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" interval="6.4s" Jan 26 16:54:40 crc kubenswrapper[4865]: E0126 16:54:40.358771 4865 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.224:6443: connect: connection refused" logger="UnhandledError" Jan 26 16:54:40 crc kubenswrapper[4865]: E0126 16:54:40.523353 4865 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.224:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.188e56317af01ab2 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-26 16:54:34.119125682 +0000 UTC m=+1.703011269,LastTimestamp:2026-01-26 16:54:34.119125682 +0000 UTC m=+1.703011269,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 26 16:54:40 crc kubenswrapper[4865]: I0126 16:54:40.622114 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"f2a5a56bc9d7995aaedd0e9dac5e9755a5b145cf9e663df946b9fa8139174e4b"} Jan 26 16:54:40 crc kubenswrapper[4865]: I0126 16:54:40.625735 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c1160d459f3a91359793f3431bd62257ad71f9446c935f43b5265443946d3e11"} Jan 26 16:54:40 crc kubenswrapper[4865]: I0126 16:54:40.625858 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 16:54:40 crc kubenswrapper[4865]: I0126 16:54:40.627188 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:54:40 crc kubenswrapper[4865]: I0126 16:54:40.627214 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:54:40 crc kubenswrapper[4865]: I0126 16:54:40.627225 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:54:40 crc kubenswrapper[4865]: I0126 16:54:40.634234 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06"} Jan 26 16:54:40 crc kubenswrapper[4865]: I0126 16:54:40.636713 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"00669bc8bfcb8d7fa5d244f0819a79880bc778ed6aab7b68787c72b6f1709443"} Jan 26 16:54:40 crc kubenswrapper[4865]: I0126 16:54:40.640208 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"97bf00e18f645318dd33d3ca36e9eda72e760adf20b3b28332628932c8e5b38b"} Jan 26 16:54:41 crc kubenswrapper[4865]: I0126 16:54:41.161092 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-15 14:47:32.492192721 +0000 UTC Jan 26 16:54:41 crc kubenswrapper[4865]: I0126 16:54:41.161163 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 16:54:41 crc kubenswrapper[4865]: I0126 16:54:41.162073 4865 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused Jan 26 16:54:41 crc kubenswrapper[4865]: I0126 16:54:41.165665 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:54:41 crc kubenswrapper[4865]: I0126 16:54:41.165725 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:54:41 crc kubenswrapper[4865]: I0126 16:54:41.165736 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:54:41 crc kubenswrapper[4865]: I0126 16:54:41.165766 4865 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 26 16:54:41 crc kubenswrapper[4865]: E0126 16:54:41.166446 4865 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.224:6443: connect: connection refused" node="crc" Jan 26 16:54:41 crc kubenswrapper[4865]: W0126 
16:54:41.381256 4865 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused Jan 26 16:54:41 crc kubenswrapper[4865]: E0126 16:54:41.381359 4865 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.224:6443: connect: connection refused" logger="UnhandledError" Jan 26 16:54:41 crc kubenswrapper[4865]: I0126 16:54:41.630707 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 16:54:41 crc kubenswrapper[4865]: I0126 16:54:41.647451 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c"} Jan 26 16:54:41 crc kubenswrapper[4865]: I0126 16:54:41.651384 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"98604715e78c851f959f0c229d5e36f68203dd7d1af5522d2c76c17ec168f95b"} Jan 26 16:54:41 crc kubenswrapper[4865]: I0126 16:54:41.673354 4865 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="f2a5a56bc9d7995aaedd0e9dac5e9755a5b145cf9e663df946b9fa8139174e4b" exitCode=0 Jan 26 16:54:41 crc kubenswrapper[4865]: I0126 16:54:41.673461 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 16:54:41 crc kubenswrapper[4865]: I0126 16:54:41.674060 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 16:54:41 crc kubenswrapper[4865]: I0126 16:54:41.674312 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"f2a5a56bc9d7995aaedd0e9dac5e9755a5b145cf9e663df946b9fa8139174e4b"} Jan 26 16:54:41 crc kubenswrapper[4865]: I0126 16:54:41.674387 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 16:54:41 crc kubenswrapper[4865]: I0126 16:54:41.675041 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:54:41 crc kubenswrapper[4865]: I0126 16:54:41.675048 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:54:41 crc kubenswrapper[4865]: I0126 16:54:41.675094 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:54:41 crc kubenswrapper[4865]: I0126 16:54:41.675051 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:54:41 crc kubenswrapper[4865]: I0126 16:54:41.675106 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:54:41 crc kubenswrapper[4865]: I0126 16:54:41.675117 4865 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:54:41 crc kubenswrapper[4865]: I0126 16:54:41.675128 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:54:41 crc kubenswrapper[4865]: I0126 16:54:41.675077 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:54:41 crc kubenswrapper[4865]: I0126 16:54:41.675164 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:54:41 crc kubenswrapper[4865]: I0126 16:54:41.946082 4865 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Jan 26 16:54:41 crc kubenswrapper[4865]: I0126 16:54:41.946409 4865 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Jan 26 16:54:41 crc kubenswrapper[4865]: I0126 16:54:41.946479 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 16:54:42 crc kubenswrapper[4865]: I0126 16:54:42.121085 4865 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused Jan 26 16:54:42 crc kubenswrapper[4865]: I0126 16:54:42.161837 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 04:33:20.913899646 +0000 UTC Jan 26 16:54:42 crc kubenswrapper[4865]: W0126 16:54:42.277260 4865 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused Jan 26 16:54:42 crc kubenswrapper[4865]: E0126 16:54:42.277342 4865 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.224:6443: connect: connection refused" logger="UnhandledError" Jan 26 16:54:42 crc kubenswrapper[4865]: I0126 16:54:42.678133 4865 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="ef21dd98ef762ec4d9fa31722e4cc551c00be7c0210f4e9f1415a2d8672c51b8" exitCode=0 Jan 26 16:54:42 crc kubenswrapper[4865]: I0126 16:54:42.678197 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"ef21dd98ef762ec4d9fa31722e4cc551c00be7c0210f4e9f1415a2d8672c51b8"} Jan 26 16:54:42 crc kubenswrapper[4865]: I0126 16:54:42.678219 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 16:54:42 crc 
kubenswrapper[4865]: I0126 16:54:42.679304 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:54:42 crc kubenswrapper[4865]: I0126 16:54:42.679337 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:54:42 crc kubenswrapper[4865]: I0126 16:54:42.679349 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:54:42 crc kubenswrapper[4865]: I0126 16:54:42.684165 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f7c1622c34aa759d218a12e8dfd4dbc31d67553019bfb4957e6e686e81800e17"} Jan 26 16:54:42 crc kubenswrapper[4865]: I0126 16:54:42.684202 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1"} Jan 26 16:54:42 crc kubenswrapper[4865]: I0126 16:54:42.684214 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769"} Jan 26 16:54:42 crc kubenswrapper[4865]: I0126 16:54:42.684289 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 16:54:42 crc kubenswrapper[4865]: I0126 16:54:42.685273 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:54:42 crc kubenswrapper[4865]: I0126 16:54:42.685297 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:54:42 crc kubenswrapper[4865]: I0126 16:54:42.685306 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:54:42 crc kubenswrapper[4865]: I0126 16:54:42.687216 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"ced4687ba65f43cdb20daccd718c699d6a86a64138439b5f8fbe6bbc37b1ba3a"} Jan 26 16:54:42 crc kubenswrapper[4865]: I0126 16:54:42.687270 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 16:54:42 crc kubenswrapper[4865]: I0126 16:54:42.687262 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 16:54:42 crc kubenswrapper[4865]: I0126 16:54:42.688001 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:54:42 crc kubenswrapper[4865]: I0126 16:54:42.688025 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:54:42 crc kubenswrapper[4865]: I0126 16:54:42.688036 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:54:42 crc kubenswrapper[4865]: I0126 16:54:42.694051 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:54:42 crc kubenswrapper[4865]: I0126 16:54:42.694163 4865 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:54:42 crc kubenswrapper[4865]: I0126 16:54:42.694223 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:54:43 crc kubenswrapper[4865]: I0126 16:54:43.121402 4865 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused Jan 26 16:54:43 crc kubenswrapper[4865]: W0126 16:54:43.221980 4865 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused Jan 26 16:54:43 crc kubenswrapper[4865]: E0126 16:54:43.222092 4865 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.224:6443: connect: connection refused" logger="UnhandledError" Jan 26 16:54:43 crc kubenswrapper[4865]: I0126 16:54:43.222156 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 16:54:43 crc kubenswrapper[4865]: I0126 16:54:43.222260 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-06 13:17:28.7012143 +0000 UTC Jan 26 16:54:43 crc kubenswrapper[4865]: I0126 16:54:43.254186 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 16:54:43 crc kubenswrapper[4865]: W0126 16:54:43.358565 4865 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused Jan 26 16:54:43 crc kubenswrapper[4865]: E0126 16:54:43.358726 4865 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.224:6443: connect: connection refused" logger="UnhandledError" Jan 26 16:54:43 crc kubenswrapper[4865]: I0126 16:54:43.698096 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 16:54:43 crc kubenswrapper[4865]: I0126 16:54:43.698645 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 16:54:43 crc kubenswrapper[4865]: I0126 16:54:43.698644 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 16:54:43 crc kubenswrapper[4865]: I0126 16:54:43.698664 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"fc69fe50939a952f84122d3cad7f10be6af95339fcd4acd4fac04c8a016dbbe6"} Jan 26 16:54:43 crc kubenswrapper[4865]: I0126 16:54:43.700156 4865 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"fcfa6a28052cef728edc32e435c8e49560aca61dcdd9a1aee7fb96762c597434"} Jan 26 16:54:43 crc kubenswrapper[4865]: I0126 16:54:43.700180 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 26 16:54:43 crc kubenswrapper[4865]: I0126 16:54:43.700511 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:54:43 crc kubenswrapper[4865]: I0126 16:54:43.700545 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:54:43 crc kubenswrapper[4865]: I0126 16:54:43.700554 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:54:43 crc kubenswrapper[4865]: I0126 16:54:43.701372 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:54:43 crc kubenswrapper[4865]: I0126 16:54:43.701396 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:54:43 crc kubenswrapper[4865]: I0126 16:54:43.701405 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:54:43 crc kubenswrapper[4865]: I0126 16:54:43.701859 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:54:43 crc kubenswrapper[4865]: I0126 16:54:43.701882 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:54:43 crc kubenswrapper[4865]: I0126 16:54:43.701893 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:54:44 crc kubenswrapper[4865]: I0126 16:54:44.121564 4865 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.224:6443: connect: connection refused Jan 26 16:54:44 crc kubenswrapper[4865]: I0126 16:54:44.222459 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-10 11:53:26.74772977 +0000 UTC Jan 26 16:54:44 crc kubenswrapper[4865]: I0126 16:54:44.546169 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 16:54:44 crc kubenswrapper[4865]: I0126 16:54:44.723840 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Jan 26 16:54:44 crc kubenswrapper[4865]: I0126 16:54:44.725734 4865 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="f7c1622c34aa759d218a12e8dfd4dbc31d67553019bfb4957e6e686e81800e17" exitCode=255 Jan 26 16:54:44 crc kubenswrapper[4865]: I0126 16:54:44.725819 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"f7c1622c34aa759d218a12e8dfd4dbc31d67553019bfb4957e6e686e81800e17"} Jan 26 16:54:44 crc kubenswrapper[4865]: I0126 
16:54:44.726034 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 16:54:44 crc kubenswrapper[4865]: I0126 16:54:44.727254 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:54:44 crc kubenswrapper[4865]: I0126 16:54:44.727288 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:54:44 crc kubenswrapper[4865]: I0126 16:54:44.727300 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:54:44 crc kubenswrapper[4865]: I0126 16:54:44.728128 4865 scope.go:117] "RemoveContainer" containerID="f7c1622c34aa759d218a12e8dfd4dbc31d67553019bfb4957e6e686e81800e17" Jan 26 16:54:44 crc kubenswrapper[4865]: I0126 16:54:44.736156 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"aeed8d07b9e87fec731b2b36308602dd19790fc9218fe6fc0667127100916835"} Jan 26 16:54:44 crc kubenswrapper[4865]: I0126 16:54:44.736213 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"538a83657298df60ad66384b8292cedbf580a5e2919d078a6137d110ed389707"} Jan 26 16:54:44 crc kubenswrapper[4865]: I0126 16:54:44.736227 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"36f7de44402f8a7aad2517680def4ccb193db48ab21336b1b6bc14f00c680744"} Jan 26 16:54:44 crc kubenswrapper[4865]: I0126 16:54:44.736247 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 16:54:44 crc kubenswrapper[4865]: I0126 16:54:44.736324 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 16:54:44 crc kubenswrapper[4865]: I0126 16:54:44.736795 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 16:54:44 crc kubenswrapper[4865]: I0126 16:54:44.737514 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:54:44 crc kubenswrapper[4865]: I0126 16:54:44.737540 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:54:44 crc kubenswrapper[4865]: I0126 16:54:44.737549 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:54:44 crc kubenswrapper[4865]: I0126 16:54:44.737588 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:54:44 crc kubenswrapper[4865]: I0126 16:54:44.737606 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:54:44 crc kubenswrapper[4865]: I0126 16:54:44.737616 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:54:44 crc kubenswrapper[4865]: I0126 16:54:44.737515 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:54:44 crc kubenswrapper[4865]: I0126 16:54:44.738167 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 16:54:44 crc kubenswrapper[4865]: I0126 16:54:44.738176 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:54:44 crc kubenswrapper[4865]: E0126 16:54:44.775083 4865 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Jan 26 16:54:45 crc kubenswrapper[4865]: I0126 16:54:45.152471 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 16:54:45 crc kubenswrapper[4865]: I0126 16:54:45.223189 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-16 14:22:50.998868006 +0000 UTC Jan 26 16:54:45 crc kubenswrapper[4865]: I0126 16:54:45.741466 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Jan 26 16:54:45 crc kubenswrapper[4865]: I0126 16:54:45.743805 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604"} Jan 26 16:54:45 crc kubenswrapper[4865]: I0126 16:54:45.743911 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 16:54:45 crc kubenswrapper[4865]: I0126 16:54:45.743981 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 16:54:45 crc kubenswrapper[4865]: I0126 16:54:45.745041 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:54:45 crc kubenswrapper[4865]: I0126 16:54:45.745078 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:54:45 crc kubenswrapper[4865]: I0126 16:54:45.745092 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:54:45 crc kubenswrapper[4865]: I0126 16:54:45.745122 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:54:45 crc kubenswrapper[4865]: I0126 16:54:45.745154 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:54:45 crc kubenswrapper[4865]: I0126 16:54:45.745172 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:54:46 crc kubenswrapper[4865]: I0126 16:54:46.222224 4865 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 26 16:54:46 crc kubenswrapper[4865]: I0126 16:54:46.222389 4865 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting 
headers)" Jan 26 16:54:46 crc kubenswrapper[4865]: I0126 16:54:46.223360 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-05 02:00:57.948514301 +0000 UTC Jan 26 16:54:46 crc kubenswrapper[4865]: I0126 16:54:46.747074 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 16:54:46 crc kubenswrapper[4865]: I0126 16:54:46.747162 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 16:54:46 crc kubenswrapper[4865]: I0126 16:54:46.748369 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:54:46 crc kubenswrapper[4865]: I0126 16:54:46.748429 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:54:46 crc kubenswrapper[4865]: I0126 16:54:46.748440 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:54:47 crc kubenswrapper[4865]: I0126 16:54:47.224490 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-27 17:20:46.487357024 +0000 UTC Jan 26 16:54:47 crc kubenswrapper[4865]: I0126 16:54:47.459844 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 16:54:47 crc kubenswrapper[4865]: I0126 16:54:47.460178 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 16:54:47 crc kubenswrapper[4865]: I0126 16:54:47.462026 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:54:47 crc kubenswrapper[4865]: I0126 16:54:47.462126 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:54:47 crc kubenswrapper[4865]: I0126 16:54:47.462140 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:54:47 crc kubenswrapper[4865]: I0126 16:54:47.506854 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Jan 26 16:54:47 crc kubenswrapper[4865]: I0126 16:54:47.507249 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 16:54:47 crc kubenswrapper[4865]: I0126 16:54:47.508914 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:54:47 crc kubenswrapper[4865]: I0126 16:54:47.509007 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:54:47 crc kubenswrapper[4865]: I0126 16:54:47.509023 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:54:47 crc kubenswrapper[4865]: I0126 16:54:47.567022 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 16:54:47 crc kubenswrapper[4865]: I0126 16:54:47.568763 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:54:47 crc kubenswrapper[4865]: I0126 16:54:47.568839 4865 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:54:47 crc kubenswrapper[4865]: I0126 16:54:47.568852 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:54:47 crc kubenswrapper[4865]: I0126 16:54:47.568885 4865 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 26 16:54:47 crc kubenswrapper[4865]: I0126 16:54:47.750207 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 16:54:47 crc kubenswrapper[4865]: I0126 16:54:47.751340 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:54:47 crc kubenswrapper[4865]: I0126 16:54:47.751395 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:54:47 crc kubenswrapper[4865]: I0126 16:54:47.751408 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:54:48 crc kubenswrapper[4865]: I0126 16:54:48.108222 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Jan 26 16:54:48 crc kubenswrapper[4865]: I0126 16:54:48.108494 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 16:54:48 crc kubenswrapper[4865]: I0126 16:54:48.110104 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:54:48 crc kubenswrapper[4865]: I0126 16:54:48.110170 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:54:48 crc kubenswrapper[4865]: I0126 16:54:48.110185 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:54:48 crc kubenswrapper[4865]: I0126 16:54:48.224829 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-14 02:36:51.999233936 +0000 UTC Jan 26 16:54:48 crc kubenswrapper[4865]: I0126 16:54:48.901573 4865 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Jan 26 16:54:49 crc kubenswrapper[4865]: I0126 16:54:49.225349 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-16 22:34:18.359221606 +0000 UTC Jan 26 16:54:50 crc kubenswrapper[4865]: I0126 16:54:50.226487 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-08 03:20:37.364374538 +0000 UTC Jan 26 16:54:51 crc kubenswrapper[4865]: I0126 16:54:51.227358 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-08 13:33:52.629794607 +0000 UTC Jan 26 16:54:51 crc kubenswrapper[4865]: I0126 16:54:51.950695 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 16:54:51 crc kubenswrapper[4865]: I0126 16:54:51.950861 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 16:54:51 crc kubenswrapper[4865]: I0126 16:54:51.952473 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 26 16:54:51 crc kubenswrapper[4865]: I0126 16:54:51.952532 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:54:51 crc kubenswrapper[4865]: I0126 16:54:51.952545 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:54:51 crc kubenswrapper[4865]: I0126 16:54:51.955375 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 16:54:52 crc kubenswrapper[4865]: I0126 16:54:52.227965 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-05 22:55:26.732655886 +0000 UTC Jan 26 16:54:52 crc kubenswrapper[4865]: I0126 16:54:52.779070 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 16:54:52 crc kubenswrapper[4865]: I0126 16:54:52.780233 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:54:52 crc kubenswrapper[4865]: I0126 16:54:52.780317 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:54:52 crc kubenswrapper[4865]: I0126 16:54:52.780364 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:54:53 crc kubenswrapper[4865]: I0126 16:54:53.228314 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-10 19:58:32.857509165 +0000 UTC Jan 26 16:54:54 crc kubenswrapper[4865]: I0126 16:54:54.228658 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-17 13:42:06.448579671 +0000 UTC Jan 26 16:54:54 crc kubenswrapper[4865]: E0126 16:54:54.775391 4865 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Jan 26 16:54:55 crc kubenswrapper[4865]: I0126 16:54:55.122327 4865 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Jan 26 16:54:55 crc kubenswrapper[4865]: I0126 16:54:55.152831 4865 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="Get \"https://192.168.126.11:6443/livez\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 26 16:54:55 crc kubenswrapper[4865]: I0126 16:54:55.152903 4865 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="Get \"https://192.168.126.11:6443/livez\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 26 16:54:55 crc kubenswrapper[4865]: I0126 16:54:55.229882 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-02 06:19:08.304481966 +0000 UTC Jan 26 16:54:56 crc 
kubenswrapper[4865]: I0126 16:54:56.223084 4865 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 26 16:54:56 crc kubenswrapper[4865]: I0126 16:54:56.223233 4865 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 26 16:54:56 crc kubenswrapper[4865]: I0126 16:54:56.230659 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-15 21:13:41.569599169 +0000 UTC Jan 26 16:54:56 crc kubenswrapper[4865]: I0126 16:54:56.629651 4865 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Jan 26 16:54:56 crc kubenswrapper[4865]: I0126 16:54:56.630302 4865 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Jan 26 16:54:57 crc kubenswrapper[4865]: I0126 16:54:57.231714 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-08 11:18:36.60568633 +0000 UTC Jan 26 16:54:57 crc kubenswrapper[4865]: I0126 16:54:57.529804 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Jan 26 16:54:57 crc kubenswrapper[4865]: I0126 16:54:57.530049 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 16:54:57 crc kubenswrapper[4865]: I0126 16:54:57.531687 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:54:57 crc kubenswrapper[4865]: I0126 16:54:57.531750 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:54:57 crc kubenswrapper[4865]: I0126 16:54:57.531760 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:54:57 crc kubenswrapper[4865]: I0126 16:54:57.541548 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Jan 26 16:54:57 crc kubenswrapper[4865]: I0126 16:54:57.803516 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 16:54:57 crc kubenswrapper[4865]: I0126 16:54:57.805086 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:54:57 crc kubenswrapper[4865]: I0126 16:54:57.805132 4865 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:54:57 crc kubenswrapper[4865]: I0126 16:54:57.805143 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:54:58 crc kubenswrapper[4865]: I0126 16:54:58.232314 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-05 08:06:53.068177865 +0000 UTC Jan 26 16:54:59 crc kubenswrapper[4865]: I0126 16:54:59.232585 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 12:10:22.641445257 +0000 UTC Jan 26 16:55:00 crc kubenswrapper[4865]: I0126 16:55:00.162441 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 16:55:00 crc kubenswrapper[4865]: I0126 16:55:00.162698 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 16:55:00 crc kubenswrapper[4865]: I0126 16:55:00.164357 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:00 crc kubenswrapper[4865]: I0126 16:55:00.164430 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:00 crc kubenswrapper[4865]: I0126 16:55:00.164450 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:00 crc kubenswrapper[4865]: I0126 16:55:00.167578 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 16:55:00 crc kubenswrapper[4865]: I0126 16:55:00.233471 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-08 07:31:01.567864906 +0000 UTC Jan 26 16:55:00 crc kubenswrapper[4865]: I0126 16:55:00.812242 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 16:55:00 crc kubenswrapper[4865]: I0126 16:55:00.814090 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:00 crc kubenswrapper[4865]: I0126 16:55:00.814133 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:00 crc kubenswrapper[4865]: I0126 16:55:00.814154 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:01 crc kubenswrapper[4865]: I0126 16:55:01.236129 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-24 10:28:23.984943018 +0000 UTC Jan 26 16:55:01 crc kubenswrapper[4865]: E0126 16:55:01.628263 4865 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="7s" Jan 26 16:55:01 crc kubenswrapper[4865]: I0126 16:55:01.629472 4865 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Jan 26 16:55:01 crc kubenswrapper[4865]: I0126 16:55:01.630743 4865 trace.go:236] 
Trace[1310130620]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (26-Jan-2026 16:54:51.603) (total time: 10026ms):
Jan 26 16:55:01 crc kubenswrapper[4865]: Trace[1310130620]: ---"Objects listed" error: 10026ms (16:55:01.630)
Jan 26 16:55:01 crc kubenswrapper[4865]: Trace[1310130620]: [10.02689642s] [10.02689642s] END
Jan 26 16:55:01 crc kubenswrapper[4865]: I0126 16:55:01.630761 4865 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Jan 26 16:55:01 crc kubenswrapper[4865]: E0126 16:55:01.634155 4865 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc"
Jan 26 16:55:01 crc kubenswrapper[4865]: I0126 16:55:01.634230 4865 trace.go:236] Trace[1266708592]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (26-Jan-2026 16:54:50.281) (total time: 11352ms):
Jan 26 16:55:01 crc kubenswrapper[4865]: Trace[1266708592]: ---"Objects listed" error: 11352ms (16:55:01.634)
Jan 26 16:55:01 crc kubenswrapper[4865]: Trace[1266708592]: [11.35242262s] [11.35242262s] END
Jan 26 16:55:01 crc kubenswrapper[4865]: I0126 16:55:01.634267 4865 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Jan 26 16:55:01 crc kubenswrapper[4865]: I0126 16:55:01.634584 4865 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Jan 26 16:55:01 crc kubenswrapper[4865]: I0126 16:55:01.635332 4865 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Jan 26 16:55:01 crc kubenswrapper[4865]: I0126 16:55:01.639851 4865 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146
Jan 26 16:55:01 crc kubenswrapper[4865]: I0126 16:55:01.662741 4865 csr.go:261] certificate signing request csr-lndxc is approved, waiting to be issued
Jan 26 16:55:01 crc kubenswrapper[4865]: I0126 16:55:01.665534 4865 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": EOF" start-of-body=
Jan 26 16:55:01 crc kubenswrapper[4865]: I0126 16:55:01.665583 4865 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": EOF"
Jan 26 16:55:01 crc kubenswrapper[4865]: I0126 16:55:01.667331 4865 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:44500->192.168.126.11:17697: read: connection reset by peer" start-of-body=
Jan 26 16:55:01 crc kubenswrapper[4865]: I0126 16:55:01.667400 4865 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:44500->192.168.126.11:17697: read: connection reset by peer"
Jan 26 16:55:01 crc kubenswrapper[4865]: I0126 16:55:01.675258 4865 csr.go:257] certificate signing request csr-lndxc is issued
Jan 26 16:55:01 crc kubenswrapper[4865]: I0126 16:55:01.815853 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log"
Jan 26 16:55:01 crc kubenswrapper[4865]: I0126 16:55:01.816319 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Jan 26 16:55:01 crc kubenswrapper[4865]: I0126 16:55:01.817769 4865 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604" exitCode=255
Jan 26 16:55:01 crc kubenswrapper[4865]: I0126 16:55:01.817805 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604"}
Jan 26 16:55:01 crc kubenswrapper[4865]: I0126 16:55:01.817926 4865 scope.go:117] "RemoveContainer" containerID="f7c1622c34aa759d218a12e8dfd4dbc31d67553019bfb4957e6e686e81800e17"
Jan 26 16:55:01 crc kubenswrapper[4865]: I0126 16:55:01.830703 4865 scope.go:117] "RemoveContainer" containerID="6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604"
Jan 26 16:55:01 crc kubenswrapper[4865]: E0126 16:55:01.830952 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792"
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.232982 4865 apiserver.go:52] "Watching apiserver"
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.235772 4865 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.236301 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf"]
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.236346 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-12 00:55:16.267309954 +0000 UTC
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.237041 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.237115 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.237632 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.237700 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h"
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.238227 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 26 16:55:02 crc kubenswrapper[4865]: E0126 16:55:02.238273 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.238725 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 26 16:55:02 crc kubenswrapper[4865]: E0126 16:55:02.238762 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.245159 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.245768 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.246037 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.246606 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls"
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.247308 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt"
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.248232 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.248360 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.248464 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Jan 26 16:55:02 crc kubenswrapper[4865]: E0126 16:55:02.237679 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.261173 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.297180 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.307097 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.320266 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7c1622c34aa759d218a12e8dfd4dbc31d67553019bfb4957e6e686e81800e17\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:54:44Z\\\",\\\"message\\\":\\\"W0126 16:54:43.777895 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0126 
16:54:43.778576 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769446483 cert, and key in /tmp/serving-cert-3566720105/serving-signer.crt, /tmp/serving-cert-3566720105/serving-signer.key\\\\nI0126 16:54:44.053813 1 observer_polling.go:159] Starting file observer\\\\nW0126 16:54:44.056963 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0126 16:54:44.057189 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 16:54:44.061461 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3566720105/tls.crt::/tmp/serving-cert-3566720105/tls.key\\\\\\\"\\\\nF0126 16:54:44.262464 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 
16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.324738 4865 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.334950 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.343653 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.354109 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.365129 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7c1622c34aa759d218a12e8dfd4dbc31d67553019bfb4957e6e686e81800e17\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:54:44Z\\\",\\\"message\\\":\\\"W0126 16:54:43.777895 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0126 
16:54:43.778576 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769446483 cert, and key in /tmp/serving-cert-3566720105/serving-signer.crt, /tmp/serving-cert-3566720105/serving-signer.key\\\\nI0126 16:54:44.053813 1 observer_polling.go:159] Starting file observer\\\\nW0126 16:54:44.056963 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0126 16:54:44.057189 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 16:54:44.061461 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3566720105/tls.crt::/tmp/serving-cert-3566720105/tls.key\\\\\\\"\\\\nF0126 16:54:44.262464 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 
16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.376826 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.385649 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.394431 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.399079 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.399125 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") "
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.399180 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.399200 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.399559 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.399582 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") "
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.399600 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") "
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.399654 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") "
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.399897 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.399907 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.399934 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.399979 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.400303 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.400600 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.400802 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.399675 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") "
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.400893 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") "
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.400920 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.401531 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") "
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.401349 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.401470 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.401619 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.401906 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.401944 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") "
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.401984 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.402294 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") "
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.402317 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") "
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.402341 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") "
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.402469 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.402500 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.402539 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.402565 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") "
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.402601 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") "
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.402629 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") "
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.402652 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.402678 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") "
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.402700 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") "
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.402747 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") "
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.402771 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.402793 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") "
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.402814 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") "
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.402836 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.402885 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") "
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.402910 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") "
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.402934 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") "
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.402955 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") "
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.402978 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.403020 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") "
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.403043 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.403064 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") "
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.403083 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.402031 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.402103 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.402227 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.403468 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.403503 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.403959 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.403104 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 26 16:55:02 crc kubenswrapper[4865]: E0126 16:55:02.404149 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:55:02.90412408 +0000 UTC m=+30.488009727 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.404195 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.404224 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.404234 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.404289 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.404328 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.404336 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.404562 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.404773 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.404856 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.404454 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.404907 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.404927 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.404938 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.404948 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.405038 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.405079 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.405107 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.405132 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.405157 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.405181 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.405204 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.405229 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.405254 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: 
\"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.405290 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.405314 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.405336 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.405359 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.405380 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.405403 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.405427 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.405451 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.405475 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.405496 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.405518 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.405544 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.405567 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.405590 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.405617 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.405640 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.405664 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.405687 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.405708 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.405728 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" 
(UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.405748 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.405824 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.405850 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.405876 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.405903 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.405961 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.406026 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.406055 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.406086 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.406112 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: 
\"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.406137 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.406160 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.406186 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.406217 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.406247 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.406268 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.406290 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.406313 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.406336 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.406358 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.406380 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.406414 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.406453 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.406476 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.406498 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.406526 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.406573 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.406599 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.406629 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.406678 4865 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.406710 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.406733 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.406755 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.406785 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.406809 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.406837 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.406879 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.406903 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.406927 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 26 16:55:02 crc 
kubenswrapper[4865]: I0126 16:55:02.406951 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.406976 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.407028 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.407055 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.407077 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.407100 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.407125 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.407151 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.407181 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.407207 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: 
\"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.407232 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.407258 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.407283 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.407307 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.407333 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.407358 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.407382 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.407408 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.407433 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.407456 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: 
\"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.407479 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.407502 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.407526 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.407550 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.407576 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.407598 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.407623 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.407684 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.407711 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.407737 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.407764 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.407788 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.407810 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.407831 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.407854 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.407876 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.407902 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.407926 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.407955 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.407980 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.408075 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.408106 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.408137 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.408193 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.408220 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.408245 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.408270 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.408535 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.408567 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.408592 4865 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.408619 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.408646 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.408672 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.408695 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.408722 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.408750 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.408779 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.408806 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.408833 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.408858 4865 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.408886 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.408915 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.408941 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.408968 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.409012 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.409045 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.409073 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.409098 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.409122 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Jan 26 16:55:02 crc 
kubenswrapper[4865]: I0126 16:55:02.409153 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.409183 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.409209 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.409235 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.409261 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.409288 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.409312 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.409338 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.409362 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.409399 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: 
\"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.409450 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.409483 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.409514 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.409542 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.409567 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.409596 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.409620 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.409650 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.409679 4865 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.409705 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.409735 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.409764 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.409790 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.409860 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.409948 4865 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.409965 4865 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.409981 4865 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.410012 4865 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.410027 4865 reconciler_common.go:293] 
"Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.410040 4865 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.410054 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.410067 4865 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.410082 4865 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.410095 4865 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.410108 4865 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.410122 4865 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.410138 4865 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.410152 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.410164 4865 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.410177 4865 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.410191 4865 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.410205 4865 reconciler_common.go:293] "Volume detached 
for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.410218 4865 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.410232 4865 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.410243 4865 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.410256 4865 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.410270 4865 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.421958 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.440187 4865 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. 
Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.460197 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.463371 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.463964 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.447130 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.405513 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.405568 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.405956 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.406798 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.407158 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.407532 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.408189 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.408301 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.408375 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.408553 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.408631 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.408699 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.408855 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.409375 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.411980 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.416915 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.417489 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.418646 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.421015 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.421048 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.421139 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.421215 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.421459 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.421584 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.421625 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.422198 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.422196 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.422208 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.422242 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). 
InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.422250 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.422655 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.422830 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.422835 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.423022 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.423079 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.423252 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.423347 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.423754 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.424047 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.424288 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.424849 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.425398 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.426147 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.426612 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.426848 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.427059 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: E0126 16:55:02.427105 4865 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 26 16:55:02 crc kubenswrapper[4865]: E0126 16:55:02.466757 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 16:55:02.966736683 +0000 UTC m=+30.550622270 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.467339 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.470704 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.470968 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.427240 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.427498 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.427504 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: E0126 16:55:02.427766 4865 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 26 16:55:02 crc kubenswrapper[4865]: E0126 16:55:02.471110 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 16:55:02.971093973 +0000 UTC m=+30.554979550 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.428178 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.428329 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.427649 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.428664 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.428951 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.429355 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.429787 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.430176 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.433866 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.434225 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.434548 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.434788 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.435027 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.438740 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.439039 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.439198 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.439377 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.439528 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.439669 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.439804 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.439966 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.440168 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.440448 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.440585 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.441112 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.442653 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.442856 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.443071 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.443973 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.471399 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.444344 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.444416 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.444986 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.445407 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.445783 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.445899 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.446177 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.446298 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). 
InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.446518 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.446814 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.446850 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.447130 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.447413 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.447608 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.447831 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.448150 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.448528 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.449048 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.449301 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.428898 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.457586 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.457943 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.459128 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.459590 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.459944 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.460304 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.460311 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.460327 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.460551 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: E0126 16:55:02.460833 4865 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.461311 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.461323 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.461598 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.462018 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.462053 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.462422 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.462430 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.462708 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.463152 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). 
InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.463469 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.464636 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: E0126 16:55:02.464882 4865 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.464902 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.471439 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.471450 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.471963 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.472207 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.473218 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.473338 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.473582 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.473942 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.474292 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.475584 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.475724 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.479589 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.479742 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.479936 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.480292 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.480362 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.480462 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.480534 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.480517 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.480745 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.481065 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.481408 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.481482 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.481529 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.481561 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.481597 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.481976 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.482055 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.482167 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.482097 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.482148 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.482247 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: E0126 16:55:02.482595 4865 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 26 16:55:02 crc kubenswrapper[4865]: E0126 16:55:02.482707 4865 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.482800 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: E0126 16:55:02.482933 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-26 16:55:02.982910628 +0000 UTC m=+30.566796215 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.482622 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.483104 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.483243 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.483480 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.483529 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.483683 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.483944 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.484237 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: E0126 16:55:02.483804 4865 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 26 16:55:02 crc kubenswrapper[4865]: E0126 16:55:02.484373 4865 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.484529 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.484742 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.484751 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.484908 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.484990 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.485404 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.485441 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.485515 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.479865 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.487133 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 16:55:02 crc kubenswrapper[4865]: E0126 16:55:02.487810 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-26 16:55:02.987779632 +0000 UTC m=+30.571665229 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.505268 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.508936 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511010 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511096 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511179 4865 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511195 4865 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511208 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511221 4865 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511231 4865 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511240 
4865 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511248 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511257 4865 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511265 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511274 4865 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511282 4865 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511292 4865 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511300 4865 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511308 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511318 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511327 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511338 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511348 4865 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 
16:55:02.511357 4865 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511366 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511374 4865 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511383 4865 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511392 4865 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511401 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511410 4865 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511420 4865 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511428 4865 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511437 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511446 4865 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511454 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511463 4865 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 
16:55:02.511472 4865 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511482 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511492 4865 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511505 4865 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511513 4865 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511522 4865 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511531 4865 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511540 4865 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511548 4865 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511558 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511567 4865 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511576 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511585 4865 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511594 4865 reconciler_common.go:293] "Volume 
detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511603 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511613 4865 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511622 4865 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511630 4865 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511638 4865 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511719 4865 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511728 4865 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511738 4865 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511748 4865 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511765 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511775 4865 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511784 4865 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511795 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: 
\"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511815 4865 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511828 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511839 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511850 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511864 4865 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511876 4865 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511888 4865 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511900 4865 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511911 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511921 4865 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511929 4865 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511937 4865 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511946 4865 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.511954 4865 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.512190 4865 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.512203 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.512276 4865 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.512372 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.512412 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.512426 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.512436 4865 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.512446 4865 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.512454 4865 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.512464 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.512474 4865 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node 
\"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.512483 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.512495 4865 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.512505 4865 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.512515 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.512488 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.512524 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.512652 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.512672 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.512687 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.512703 4865 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.512719 4865 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.512735 4865 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.512750 4865 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" 
(UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.512765 4865 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.512779 4865 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.512795 4865 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.512807 4865 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.512819 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.512831 4865 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.512846 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.512859 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.512872 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.512884 4865 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.512896 4865 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.512908 4865 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.512920 4865 
reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.512933 4865 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.512945 4865 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.512957 4865 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.512969 4865 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.512982 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513013 4865 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513028 4865 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513042 4865 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513054 4865 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513069 4865 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513082 4865 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513121 4865 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513134 4865 reconciler_common.go:293] "Volume detached for 
volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513150 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513162 4865 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513181 4865 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513194 4865 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513206 4865 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513218 4865 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513232 4865 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513245 4865 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513259 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513274 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513288 4865 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513300 4865 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513312 4865 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513324 4865 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513338 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513352 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513364 4865 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513377 4865 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513390 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513412 4865 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513426 4865 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513439 4865 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513452 4865 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513465 4865 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513477 4865 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513490 4865 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: 
\"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513503 4865 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513516 4865 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513530 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513543 4865 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513568 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513587 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513601 4865 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513613 4865 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513625 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513638 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513652 4865 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513666 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513679 4865 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" 
(UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513692 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513709 4865 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513724 4865 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513736 4865 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513812 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513827 4865 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513842 4865 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.513936 4865 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.514009 4865 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.514042 4865 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.514055 4865 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.514068 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.563143 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.576394 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.583550 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 26 16:55:02 crc kubenswrapper[4865]: W0126 16:55:02.597164 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-eda4f2a38f5c3f9a8cdf763a3d981adf36dd9a50004c934ed8177dda23e5ac98 WatchSource:0}: Error finding container eda4f2a38f5c3f9a8cdf763a3d981adf36dd9a50004c934ed8177dda23e5ac98: Status 404 returned error can't find the container with id eda4f2a38f5c3f9a8cdf763a3d981adf36dd9a50004c934ed8177dda23e5ac98 Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.679646 4865 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2027-01-26 16:50:01 +0000 UTC, rotation deadline is 2026-12-18 12:27:23.903830565 +0000 UTC Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.679781 4865 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 7819h32m21.224078523s for next certificate rotation Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.715110 4865 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.821609 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"eda4f2a38f5c3f9a8cdf763a3d981adf36dd9a50004c934ed8177dda23e5ac98"} Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.823465 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73"} Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.823542 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862"} Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.823563 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"461b86c73ddc7965635564260a91768abff8bdd32a3200412907861dd859e814"} Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.825129 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"44c06740a3b924e4b54afda160e7e790e2bec67ca8852b703887591802f029af"} Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.825167 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" 
event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"f6a85a4932eb5b7d8a00a9c1985473c59a13ba642b3a8b4544837e4d4eb3b1bc"} Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.826833 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.829739 4865 scope.go:117] "RemoveContainer" containerID="6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604" Jan 26 16:55:02 crc kubenswrapper[4865]: E0126 16:55:02.829946 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.836332 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.847142 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.859215 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f7c1622c34aa759d218a12e8dfd4dbc31d67553019bfb4957e6e686e81800e17\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:54:44Z\\\",\\\"message\\\":\\\"W0126 16:54:43.777895 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0126 
16:54:43.778576 1 crypto.go:601] Generating new CA for check-endpoints-signer@1769446483 cert, and key in /tmp/serving-cert-3566720105/serving-signer.crt, /tmp/serving-cert-3566720105/serving-signer.key\\\\nI0126 16:54:44.053813 1 observer_polling.go:159] Starting file observer\\\\nW0126 16:54:44.056963 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0126 16:54:44.057189 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 16:54:44.061461 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3566720105/tls.crt::/tmp/serving-cert-3566720105/tls.key\\\\\\\"\\\\nF0126 16:54:44.262464 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 
16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.917841 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:55:02 crc kubenswrapper[4865]: E0126 16:55:02.918073 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-01-26 16:55:03.918042577 +0000 UTC m=+31.501928164 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.942183 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.955243 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"web
hook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.969266 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.983362 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 16:55:02 crc kubenswrapper[4865]: I0126 16:55:02.998772 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 16:55:03 crc kubenswrapper[4865]: I0126 16:55:03.015933 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 16:55:03 crc kubenswrapper[4865]: I0126 16:55:03.019061 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:55:03 crc kubenswrapper[4865]: I0126 16:55:03.019206 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:55:03 crc kubenswrapper[4865]: I0126 16:55:03.019349 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:55:03 crc kubenswrapper[4865]: I0126 16:55:03.019484 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:55:03 crc kubenswrapper[4865]: E0126 16:55:03.019357 4865 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 26 16:55:03 crc kubenswrapper[4865]: E0126 
16:55:03.019695 4865 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 26 16:55:03 crc kubenswrapper[4865]: E0126 16:55:03.019777 4865 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 16:55:03 crc kubenswrapper[4865]: E0126 16:55:03.019934 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-26 16:55:04.019914839 +0000 UTC m=+31.603800426 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 16:55:03 crc kubenswrapper[4865]: E0126 16:55:03.019425 4865 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 26 16:55:03 crc kubenswrapper[4865]: E0126 16:55:03.020145 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 16:55:04.020136486 +0000 UTC m=+31.604022073 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 26 16:55:03 crc kubenswrapper[4865]: E0126 16:55:03.019538 4865 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 26 16:55:03 crc kubenswrapper[4865]: E0126 16:55:03.019619 4865 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 26 16:55:03 crc kubenswrapper[4865]: E0126 16:55:03.020415 4865 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 26 16:55:03 crc kubenswrapper[4865]: E0126 16:55:03.020435 4865 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 16:55:03 crc kubenswrapper[4865]: E0126 16:55:03.020365 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 16:55:04.020332451 +0000 UTC m=+31.604218038 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 26 16:55:03 crc kubenswrapper[4865]: E0126 16:55:03.020517 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-26 16:55:04.020505516 +0000 UTC m=+31.604391303 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 16:55:03 crc kubenswrapper[4865]: I0126 16:55:03.035377 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 16:55:03 crc kubenswrapper[4865]: I0126 16:55:03.053649 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 16:55:03 crc kubenswrapper[4865]: I0126 16:55:03.075415 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 16:55:03 crc kubenswrapper[4865]: I0126 16:55:03.091244 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 16:55:03 crc kubenswrapper[4865]: I0126 16:55:03.104686 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 16:55:03 crc kubenswrapper[4865]: I0126 16:55:03.228037 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 16:55:03 crc kubenswrapper[4865]: I0126 16:55:03.231647 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 16:55:03 crc kubenswrapper[4865]: I0126 16:55:03.237025 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-22 22:59:48.291372889 +0000 UTC Jan 26 16:55:03 crc kubenswrapper[4865]: I0126 16:55:03.485183 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Jan 26 16:55:03 crc kubenswrapper[4865]: I0126 16:55:03.504254 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 16:55:03 crc kubenswrapper[4865]: I0126 16:55:03.508918 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-p7qpc"] Jan 26 16:55:03 crc kubenswrapper[4865]: I0126 16:55:03.509322 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-p7qpc" Jan 26 16:55:03 crc kubenswrapper[4865]: I0126 16:55:03.519876 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Jan 26 16:55:03 crc kubenswrapper[4865]: I0126 16:55:03.520108 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Jan 26 16:55:03 crc kubenswrapper[4865]: I0126 16:55:03.520905 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Jan 26 16:55:03 crc kubenswrapper[4865]: I0126 16:55:03.524010 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 16:55:03 crc kubenswrapper[4865]: I0126 16:55:03.624417 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m7gvt\" (UniqueName: \"kubernetes.io/projected/000aa434-a8c8-4051-88f4-c50d48ce851b-kube-api-access-m7gvt\") pod \"node-resolver-p7qpc\" (UID: \"000aa434-a8c8-4051-88f4-c50d48ce851b\") " pod="openshift-dns/node-resolver-p7qpc" Jan 26 16:55:03 crc kubenswrapper[4865]: I0126 16:55:03.624466 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/000aa434-a8c8-4051-88f4-c50d48ce851b-hosts-file\") pod \"node-resolver-p7qpc\" (UID: \"000aa434-a8c8-4051-88f4-c50d48ce851b\") " pod="openshift-dns/node-resolver-p7qpc" Jan 26 16:55:03 crc kubenswrapper[4865]: I0126 16:55:03.636581 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 16:55:03 crc kubenswrapper[4865]: I0126 16:55:03.725158 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/000aa434-a8c8-4051-88f4-c50d48ce851b-hosts-file\") pod \"node-resolver-p7qpc\" (UID: \"000aa434-a8c8-4051-88f4-c50d48ce851b\") " pod="openshift-dns/node-resolver-p7qpc" Jan 26 16:55:03 crc kubenswrapper[4865]: I0126 16:55:03.725233 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m7gvt\" (UniqueName: \"kubernetes.io/projected/000aa434-a8c8-4051-88f4-c50d48ce851b-kube-api-access-m7gvt\") pod \"node-resolver-p7qpc\" (UID: \"000aa434-a8c8-4051-88f4-c50d48ce851b\") " pod="openshift-dns/node-resolver-p7qpc" Jan 26 16:55:03 crc kubenswrapper[4865]: I0126 16:55:03.725571 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/000aa434-a8c8-4051-88f4-c50d48ce851b-hosts-file\") pod \"node-resolver-p7qpc\" (UID: \"000aa434-a8c8-4051-88f4-c50d48ce851b\") " pod="openshift-dns/node-resolver-p7qpc" 
Jan 26 16:55:03 crc kubenswrapper[4865]: I0126 16:55:03.855945 4865 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Jan 26 16:55:03 crc kubenswrapper[4865]: I0126 16:55:03.856948 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m7gvt\" (UniqueName: \"kubernetes.io/projected/000aa434-a8c8-4051-88f4-c50d48ce851b-kube-api-access-m7gvt\") pod \"node-resolver-p7qpc\" (UID: \"000aa434-a8c8-4051-88f4-c50d48ce851b\") " pod="openshift-dns/node-resolver-p7qpc" Jan 26 16:55:03 crc kubenswrapper[4865]: W0126 16:55:03.859259 4865 reflector.go:484] object-"openshift-dns"/"kube-root-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-dns"/"kube-root-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Jan 26 16:55:03 crc kubenswrapper[4865]: W0126 16:55:03.860389 4865 reflector.go:484] object-"openshift-dns"/"node-resolver-dockercfg-kz9s7": watch of *v1.Secret ended with: very short watch: object-"openshift-dns"/"node-resolver-dockercfg-kz9s7": Unexpected watch close - watch lasted less than a second and no items received Jan 26 16:55:03 crc kubenswrapper[4865]: W0126 16:55:03.862444 4865 reflector.go:484] object-"openshift-dns"/"openshift-service-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-dns"/"openshift-service-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Jan 26 16:55:03 crc kubenswrapper[4865]: I0126 16:55:03.864417 4865 scope.go:117] "RemoveContainer" containerID="6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604" Jan 26 16:55:03 crc kubenswrapper[4865]: E0126 16:55:03.864591 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Jan 26 16:55:03 crc kubenswrapper[4865]: I0126 16:55:03.864746 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container 
could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Patch \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-network-operator/pods/network-operator-58b4c7f79c-55gtf/status\": read tcp 38.102.83.224:43742->38.102.83.224:6443: use of closed network connection" Jan 26 16:55:03 crc kubenswrapper[4865]: E0126 16:55:03.937874 4865 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 16:55:03 crc kubenswrapper[4865]: I0126 16:55:03.964600 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32f
a41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 16:55:03 crc kubenswrapper[4865]: E0126 16:55:03.964696 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:55:05.964680158 +0000 UTC m=+33.548565745 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:55:03 crc kubenswrapper[4865]: I0126 16:55:03.964635 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.065746 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.066153 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.066279 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:55:04 crc 
kubenswrapper[4865]: I0126 16:55:04.066391 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:55:04 crc kubenswrapper[4865]: E0126 16:55:04.065947 4865 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 26 16:55:04 crc kubenswrapper[4865]: E0126 16:55:04.066552 4865 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 26 16:55:04 crc kubenswrapper[4865]: E0126 16:55:04.066628 4865 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 16:55:04 crc kubenswrapper[4865]: E0126 16:55:04.066732 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-26 16:55:06.066718265 +0000 UTC m=+33.650603842 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 16:55:04 crc kubenswrapper[4865]: E0126 16:55:04.066245 4865 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 26 16:55:04 crc kubenswrapper[4865]: E0126 16:55:04.066938 4865 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 26 16:55:04 crc kubenswrapper[4865]: E0126 16:55:04.067045 4865 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 16:55:04 crc kubenswrapper[4865]: E0126 16:55:04.066341 4865 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 26 16:55:04 crc kubenswrapper[4865]: E0126 16:55:04.066471 4865 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 26 16:55:04 crc kubenswrapper[4865]: E0126 16:55:04.067226 4865 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-26 16:55:06.067122056 +0000 UTC m=+33.651007643 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 16:55:04 crc kubenswrapper[4865]: E0126 16:55:04.067312 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 16:55:06.067301691 +0000 UTC m=+33.651187278 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 26 16:55:04 crc kubenswrapper[4865]: E0126 16:55:04.067399 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 16:55:06.067387134 +0000 UTC m=+33.651272731 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.124553 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-p7qpc" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.136353 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 16:55:04 crc kubenswrapper[4865]: W0126 16:55:04.138359 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod000aa434_a8c8_4051_88f4_c50d48ce851b.slice/crio-016a25909553253a2f7ed689d7cfdf2ede9e0ac8712b13c11f72c32708b6c1a0 WatchSource:0}: Error finding container 016a25909553253a2f7ed689d7cfdf2ede9e0ac8712b13c11f72c32708b6c1a0: Status 404 returned error can't find the container with id 016a25909553253a2f7ed689d7cfdf2ede9e0ac8712b13c11f72c32708b6c1a0 Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.170649 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-q8cb9"] Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.170995 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.171979 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-bz29j"] Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.172161 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.173424 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.175571 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-44x2q"] Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.176443 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.176754 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.176821 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.176772 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.176958 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.177320 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.177483 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.177659 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.177794 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.177927 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.178089 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.200384 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.200784 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.200846 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.200866 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.201023 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.201064 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.201301 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.209390 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-6fsw2"] Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.210293 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.212340 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.220695 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.224907 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.238524 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-16 20:58:32.307578716 +0000 UTC Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.247568 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.268296 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/bccdd94a-94b4-4a16-95dd-c375a34f754f-os-release\") pod \"multus-additional-cni-plugins-6fsw2\" (UID: \"bccdd94a-94b4-4a16-95dd-c375a34f754f\") " pod="openshift-multus/multus-additional-cni-plugins-6fsw2" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.268331 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/bccdd94a-94b4-4a16-95dd-c375a34f754f-cni-binary-copy\") pod \"multus-additional-cni-plugins-6fsw2\" (UID: \"bccdd94a-94b4-4a16-95dd-c375a34f754f\") " pod="openshift-multus/multus-additional-cni-plugins-6fsw2" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.268347 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-var-lib-openvswitch\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.268361 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.268410 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-multus-cni-dir\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.268435 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-host-var-lib-cni-bin\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 
16:55:04.268449 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-host-var-lib-cni-multus\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.268465 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-hostroot\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.268489 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-multus-daemon-config\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.268504 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-host-slash\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.281491 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-host-run-k8s-cni-cncf-io\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.281521 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-host-run-netns\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.281537 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-multus-conf-dir\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.281561 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-host-kubelet\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.281589 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-host-var-lib-kubelet\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.281604 4865 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-host-run-netns\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.281618 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-cnibin\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.281640 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-run-ovn\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.281654 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0c0135f6-4074-4aab-9413-a8eb948cd566-ovnkube-config\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.281668 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-host-cni-netd\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.281684 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0c0135f6-4074-4aab-9413-a8eb948cd566-env-overrides\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.281700 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-multus-socket-dir-parent\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.281714 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/bccdd94a-94b4-4a16-95dd-c375a34f754f-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-6fsw2\" (UID: \"bccdd94a-94b4-4a16-95dd-c375a34f754f\") " pod="openshift-multus/multus-additional-cni-plugins-6fsw2" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.281728 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0ddedab5-2528-4881-9251-9ba5334aea61-mcd-auth-proxy-config\") pod \"machine-config-daemon-q8cb9\" (UID: \"0ddedab5-2528-4881-9251-9ba5334aea61\") " 
pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.281742 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-systemd-units\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.281756 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-node-log\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.281770 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/0ddedab5-2528-4881-9251-9ba5334aea61-rootfs\") pod \"machine-config-daemon-q8cb9\" (UID: \"0ddedab5-2528-4881-9251-9ba5334aea61\") " pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.281785 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8jdbl\" (UniqueName: \"kubernetes.io/projected/0ddedab5-2528-4881-9251-9ba5334aea61-kube-api-access-8jdbl\") pod \"machine-config-daemon-q8cb9\" (UID: \"0ddedab5-2528-4881-9251-9ba5334aea61\") " pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.281799 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-log-socket\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.281816 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6qkvf\" (UniqueName: \"kubernetes.io/projected/bccdd94a-94b4-4a16-95dd-c375a34f754f-kube-api-access-6qkvf\") pod \"multus-additional-cni-plugins-6fsw2\" (UID: \"bccdd94a-94b4-4a16-95dd-c375a34f754f\") " pod="openshift-multus/multus-additional-cni-plugins-6fsw2" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.281832 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vcbts\" (UniqueName: \"kubernetes.io/projected/0c0135f6-4074-4aab-9413-a8eb948cd566-kube-api-access-vcbts\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.281866 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/bccdd94a-94b4-4a16-95dd-c375a34f754f-cnibin\") pod \"multus-additional-cni-plugins-6fsw2\" (UID: \"bccdd94a-94b4-4a16-95dd-c375a34f754f\") " pod="openshift-multus/multus-additional-cni-plugins-6fsw2" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.281883 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0ddedab5-2528-4881-9251-9ba5334aea61-proxy-tls\") pod \"machine-config-daemon-q8cb9\" (UID: \"0ddedab5-2528-4881-9251-9ba5334aea61\") " pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.281900 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-run-systemd\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.281916 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-79bqr\" (UniqueName: \"kubernetes.io/projected/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-kube-api-access-79bqr\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.281930 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-etc-kubernetes\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.281943 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-os-release\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.281956 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-host-run-multus-certs\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.281971 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/bccdd94a-94b4-4a16-95dd-c375a34f754f-tuning-conf-dir\") pod \"multus-additional-cni-plugins-6fsw2\" (UID: \"bccdd94a-94b4-4a16-95dd-c375a34f754f\") " pod="openshift-multus/multus-additional-cni-plugins-6fsw2" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.281985 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-etc-openvswitch\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.282120 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-host-run-ovn-kubernetes\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.282204 4865 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0c0135f6-4074-4aab-9413-a8eb948cd566-ovn-node-metrics-cert\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.282242 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-cni-binary-copy\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.282285 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-run-openvswitch\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.282311 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/0c0135f6-4074-4aab-9413-a8eb948cd566-ovnkube-script-lib\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.282334 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-system-cni-dir\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.282361 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/bccdd94a-94b4-4a16-95dd-c375a34f754f-system-cni-dir\") pod \"multus-additional-cni-plugins-6fsw2\" (UID: \"bccdd94a-94b4-4a16-95dd-c375a34f754f\") " pod="openshift-multus/multus-additional-cni-plugins-6fsw2" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.282381 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-host-cni-bin\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.357601 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.357696 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.357867 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:55:04 crc kubenswrapper[4865]: E0126 16:55:04.357953 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:55:04 crc kubenswrapper[4865]: E0126 16:55:04.358567 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:55:04 crc kubenswrapper[4865]: E0126 16:55:04.358600 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.383182 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-multus-daemon-config\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.383223 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-host-run-k8s-cni-cncf-io\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.383247 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-host-run-netns\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.383265 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-multus-conf-dir\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.383285 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-host-kubelet\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.383872 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" 
(UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-host-slash\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.383896 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-host-run-netns\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.383918 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-cnibin\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.383964 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-host-var-lib-kubelet\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.383334 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-host-kubelet\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.384034 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-host-slash\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.383812 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-host-run-k8s-cni-cncf-io\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.384067 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-host-run-netns\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.383834 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-host-run-netns\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.383851 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-multus-conf-dir\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc 
kubenswrapper[4865]: I0126 16:55:04.384401 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-multus-daemon-config\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.384425 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-host-var-lib-kubelet\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.384454 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-cnibin\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.384488 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-run-ovn\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.384506 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0c0135f6-4074-4aab-9413-a8eb948cd566-ovnkube-config\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.384522 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0c0135f6-4074-4aab-9413-a8eb948cd566-env-overrides\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.384539 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-multus-socket-dir-parent\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.384557 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/bccdd94a-94b4-4a16-95dd-c375a34f754f-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-6fsw2\" (UID: \"bccdd94a-94b4-4a16-95dd-c375a34f754f\") " pod="openshift-multus/multus-additional-cni-plugins-6fsw2" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.384576 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0ddedab5-2528-4881-9251-9ba5334aea61-mcd-auth-proxy-config\") pod \"machine-config-daemon-q8cb9\" (UID: \"0ddedab5-2528-4881-9251-9ba5334aea61\") " pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.384623 4865 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-systemd-units\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.384653 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-run-ovn\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.385451 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-multus-socket-dir-parent\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.386230 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0ddedab5-2528-4881-9251-9ba5334aea61-mcd-auth-proxy-config\") pod \"machine-config-daemon-q8cb9\" (UID: \"0ddedab5-2528-4881-9251-9ba5334aea61\") " pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.384597 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-systemd-units\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.386274 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-node-log\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.386295 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-host-cni-netd\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.386315 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8jdbl\" (UniqueName: \"kubernetes.io/projected/0ddedab5-2528-4881-9251-9ba5334aea61-kube-api-access-8jdbl\") pod \"machine-config-daemon-q8cb9\" (UID: \"0ddedab5-2528-4881-9251-9ba5334aea61\") " pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.386524 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-node-log\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.386581 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: 
\"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-host-cni-netd\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.386834 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0c0135f6-4074-4aab-9413-a8eb948cd566-env-overrides\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.387118 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/bccdd94a-94b4-4a16-95dd-c375a34f754f-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-6fsw2\" (UID: \"bccdd94a-94b4-4a16-95dd-c375a34f754f\") " pod="openshift-multus/multus-additional-cni-plugins-6fsw2" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.387149 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-log-socket\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.387184 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6qkvf\" (UniqueName: \"kubernetes.io/projected/bccdd94a-94b4-4a16-95dd-c375a34f754f-kube-api-access-6qkvf\") pod \"multus-additional-cni-plugins-6fsw2\" (UID: \"bccdd94a-94b4-4a16-95dd-c375a34f754f\") " pod="openshift-multus/multus-additional-cni-plugins-6fsw2" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.387209 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/0ddedab5-2528-4881-9251-9ba5334aea61-rootfs\") pod \"machine-config-daemon-q8cb9\" (UID: \"0ddedab5-2528-4881-9251-9ba5334aea61\") " pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.387236 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/bccdd94a-94b4-4a16-95dd-c375a34f754f-cnibin\") pod \"multus-additional-cni-plugins-6fsw2\" (UID: \"bccdd94a-94b4-4a16-95dd-c375a34f754f\") " pod="openshift-multus/multus-additional-cni-plugins-6fsw2" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.387255 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0ddedab5-2528-4881-9251-9ba5334aea61-proxy-tls\") pod \"machine-config-daemon-q8cb9\" (UID: \"0ddedab5-2528-4881-9251-9ba5334aea61\") " pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.387272 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vcbts\" (UniqueName: \"kubernetes.io/projected/0c0135f6-4074-4aab-9413-a8eb948cd566-kube-api-access-vcbts\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.387289 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: 
\"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-run-systemd\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.387306 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-79bqr\" (UniqueName: \"kubernetes.io/projected/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-kube-api-access-79bqr\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.387356 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-etc-kubernetes\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.387381 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-log-socket\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.387530 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/0ddedab5-2528-4881-9251-9ba5334aea61-rootfs\") pod \"machine-config-daemon-q8cb9\" (UID: \"0ddedab5-2528-4881-9251-9ba5334aea61\") " pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.387559 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/bccdd94a-94b4-4a16-95dd-c375a34f754f-cnibin\") pod \"multus-additional-cni-plugins-6fsw2\" (UID: \"bccdd94a-94b4-4a16-95dd-c375a34f754f\") " pod="openshift-multus/multus-additional-cni-plugins-6fsw2" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.387938 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-run-systemd\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.389723 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-etc-kubernetes\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.389829 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-os-release\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.389904 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-host-run-multus-certs\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 
16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.390028 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/bccdd94a-94b4-4a16-95dd-c375a34f754f-tuning-conf-dir\") pod \"multus-additional-cni-plugins-6fsw2\" (UID: \"bccdd94a-94b4-4a16-95dd-c375a34f754f\") " pod="openshift-multus/multus-additional-cni-plugins-6fsw2" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.392451 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-os-release\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.386827 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0c0135f6-4074-4aab-9413-a8eb948cd566-ovnkube-config\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.395011 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-etc-openvswitch\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.399481 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-host-run-ovn-kubernetes\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.395076 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/bccdd94a-94b4-4a16-95dd-c375a34f754f-tuning-conf-dir\") pod \"multus-additional-cni-plugins-6fsw2\" (UID: \"bccdd94a-94b4-4a16-95dd-c375a34f754f\") " pod="openshift-multus/multus-additional-cni-plugins-6fsw2" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.395478 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ddedab5-2528-4881-9251-9ba5334aea61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q8cb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.395053 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-etc-openvswitch\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.398597 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-host-run-multus-certs\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.400101 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-host-run-ovn-kubernetes\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.398484 
4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0ddedab5-2528-4881-9251-9ba5334aea61-proxy-tls\") pod \"machine-config-daemon-q8cb9\" (UID: \"0ddedab5-2528-4881-9251-9ba5334aea61\") " pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.400373 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0c0135f6-4074-4aab-9413-a8eb948cd566-ovn-node-metrics-cert\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.400461 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-cni-binary-copy\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.400538 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-run-openvswitch\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.400614 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/0c0135f6-4074-4aab-9413-a8eb948cd566-ovnkube-script-lib\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.400694 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/bccdd94a-94b4-4a16-95dd-c375a34f754f-system-cni-dir\") pod \"multus-additional-cni-plugins-6fsw2\" (UID: \"bccdd94a-94b4-4a16-95dd-c375a34f754f\") " pod="openshift-multus/multus-additional-cni-plugins-6fsw2" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.400782 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-host-cni-bin\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.400870 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-system-cni-dir\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.400945 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/bccdd94a-94b4-4a16-95dd-c375a34f754f-cni-binary-copy\") pod \"multus-additional-cni-plugins-6fsw2\" (UID: \"bccdd94a-94b4-4a16-95dd-c375a34f754f\") " pod="openshift-multus/multus-additional-cni-plugins-6fsw2" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.401045 4865 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-var-lib-openvswitch\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.401128 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.401239 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/bccdd94a-94b4-4a16-95dd-c375a34f754f-os-release\") pod \"multus-additional-cni-plugins-6fsw2\" (UID: \"bccdd94a-94b4-4a16-95dd-c375a34f754f\") " pod="openshift-multus/multus-additional-cni-plugins-6fsw2" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.401322 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-multus-cni-dir\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.401398 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-host-var-lib-cni-bin\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.401492 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-host-var-lib-cni-multus\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.401568 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-hostroot\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.401705 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-hostroot\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.434772 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-system-cni-dir\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.435692 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: 
\"kubernetes.io/configmap/bccdd94a-94b4-4a16-95dd-c375a34f754f-cni-binary-copy\") pod \"multus-additional-cni-plugins-6fsw2\" (UID: \"bccdd94a-94b4-4a16-95dd-c375a34f754f\") " pod="openshift-multus/multus-additional-cni-plugins-6fsw2" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.435862 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-var-lib-openvswitch\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.435980 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.436287 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/bccdd94a-94b4-4a16-95dd-c375a34f754f-os-release\") pod \"multus-additional-cni-plugins-6fsw2\" (UID: \"bccdd94a-94b4-4a16-95dd-c375a34f754f\") " pod="openshift-multus/multus-additional-cni-plugins-6fsw2" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.436636 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-multus-cni-dir\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.436779 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-host-var-lib-cni-bin\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.436900 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-host-var-lib-cni-multus\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.440143 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/0c0135f6-4074-4aab-9413-a8eb948cd566-ovnkube-script-lib\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.440240 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-run-openvswitch\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.440285 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/bccdd94a-94b4-4a16-95dd-c375a34f754f-system-cni-dir\") 
pod \"multus-additional-cni-plugins-6fsw2\" (UID: \"bccdd94a-94b4-4a16-95dd-c375a34f754f\") " pod="openshift-multus/multus-additional-cni-plugins-6fsw2" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.440585 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-host-cni-bin\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.448367 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vcbts\" (UniqueName: \"kubernetes.io/projected/0c0135f6-4074-4aab-9413-a8eb948cd566-kube-api-access-vcbts\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.449250 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0c0135f6-4074-4aab-9413-a8eb948cd566-ovn-node-metrics-cert\") pod \"ovnkube-node-44x2q\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") " pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.449379 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bz29j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-79bqr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bz29j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.450541 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8jdbl\" (UniqueName: \"kubernetes.io/projected/0ddedab5-2528-4881-9251-9ba5334aea61-kube-api-access-8jdbl\") pod \"machine-config-daemon-q8cb9\" (UID: \"0ddedab5-2528-4881-9251-9ba5334aea61\") " pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.456651 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-79bqr\" (UniqueName: \"kubernetes.io/projected/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-kube-api-access-79bqr\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.457540 4865 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/d5c89572-d108-4b35-ab46-dfbbc8b7e3be-cni-binary-copy\") pod \"multus-bz29j\" (UID: \"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\") " pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.463813 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.464687 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.464905 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6qkvf\" (UniqueName: \"kubernetes.io/projected/bccdd94a-94b4-4a16-95dd-c375a34f754f-kube-api-access-6qkvf\") pod \"multus-additional-cni-plugins-6fsw2\" (UID: \"bccdd94a-94b4-4a16-95dd-c375a34f754f\") " pod="openshift-multus/multus-additional-cni-plugins-6fsw2" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.465825 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.466441 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.467462 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.467932 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.468512 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.469454 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.470058 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.471006 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.471505 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.472512 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.472977 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.473502 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.476772 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.477432 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.478480 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.478885 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.479452 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.481336 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.481937 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.483919 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.484383 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.485459 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.485460 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:04Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.485972 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.487600 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.490121 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.490626 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Jan 26 16:55:04 
crc kubenswrapper[4865]: I0126 16:55:04.493178 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.493659 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.494598 4865 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.494700 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.496489 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.498100 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.498991 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.500467 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.501169 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.502215 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.502907 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.505513 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.505952 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.507097 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Jan 26 16:55:04 crc 
kubenswrapper[4865]: I0126 16:55:04.508089 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.508665 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.509516 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.509906 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44c06740a3b924e4b54afda160e7e790e2bec67ca8852b703887591802f029af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:04Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.510122 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.511020 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" 
path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.511182 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.511807 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.512688 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.513241 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.513704 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.515094 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.515672 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.516611 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.528608 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:04Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.528886 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-bz29j" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.539443 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.547703 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:04Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.547951 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.568110 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e58c71a-f1f1-421b-8e21-53ef9b51b937\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c1c8845e13a8d4b226f13897c388c5e07b2e9442c182930b42a88b2ed7b299b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a953b0ed4474ae87ed2f4658deb39951e319c1675872e49ea28b634d3c37b8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\
\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1160d459f3a91359793f3431bd62257ad71f9446c935f43b5265443946d3e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:04Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:04 crc kubenswrapper[4865]: W0126 16:55:04.577278 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0c0135f6_4074_4aab_9413_a8eb948cd566.slice/crio-09ba44bf347db4fbc1971fe48cda28178bc63028f515f3c02a68e691dacb46c0 WatchSource:0}: Error finding container 09ba44bf347db4fbc1971fe48cda28178bc63028f515f3c02a68e691dacb46c0: Status 404 returned error can't find the container with id 09ba44bf347db4fbc1971fe48cda28178bc63028f515f3c02a68e691dacb46c0 Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.582467 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p7qpc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"000aa434-a8c8-4051-88f4-c50d48ce851b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m7gvt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p7qpc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:04Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.606789 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:04Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.625626 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:04Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.650577 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:04Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.662941 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:04Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.676255 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ddedab5-2528-4881-9251-9ba5334aea61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q8cb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:04Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.702869 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bz29j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-79bqr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bz29j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:04Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.727029 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:04Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.741048 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44c06740a3b924e4b54afda160e7e790e2bec67ca8852b703887591802f029af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:04Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.768340 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.791965 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:04Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.836358 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:04Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.891194 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" event={"ID":"0c0135f6-4074-4aab-9413-a8eb948cd566","Type":"ContainerStarted","Data":"09ba44bf347db4fbc1971fe48cda28178bc63028f515f3c02a68e691dacb46c0"} Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.893701 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-bz29j" event={"ID":"d5c89572-d108-4b35-ab46-dfbbc8b7e3be","Type":"ContainerStarted","Data":"82ddf2354d7df028aad27125fddbfccd87910a5407d7d63ff39865dd9cedcaa7"} Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.893765 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-bz29j" event={"ID":"d5c89572-d108-4b35-ab46-dfbbc8b7e3be","Type":"ContainerStarted","Data":"a53bcc4421d98386c2e19828513d83bdd1b388f74aed12fc68a7426059a7aa97"} Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.895278 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" 
event={"ID":"bccdd94a-94b4-4a16-95dd-c375a34f754f","Type":"ContainerStarted","Data":"de7e8a41868b35cebee3164c15d57804eea88255dfa32b6f6f7569283e1f266b"} Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.901389 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" event={"ID":"0ddedab5-2528-4881-9251-9ba5334aea61","Type":"ContainerStarted","Data":"79930d9ec5ae2071a02e2417508458bd2f1b5693205ae0ad3ee46c5a3a31493b"} Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.901467 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" event={"ID":"0ddedab5-2528-4881-9251-9ba5334aea61","Type":"ContainerStarted","Data":"59c5602c28a9ccda82157c18ccb111dce2c878ba7446ce213a2ecb6378f35e77"} Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.903131 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-p7qpc" event={"ID":"000aa434-a8c8-4051-88f4-c50d48ce851b","Type":"ContainerStarted","Data":"b4d967fccbef000c343246b0f7d1cbdd98592d6dc499d444472c20588a6e6697"} Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.903179 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-p7qpc" event={"ID":"000aa434-a8c8-4051-88f4-c50d48ce851b","Type":"ContainerStarted","Data":"016a25909553253a2f7ed689d7cfdf2ede9e0ac8712b13c11f72c32708b6c1a0"} Jan 26 16:55:04 crc kubenswrapper[4865]: I0126 16:55:04.975696 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c0135f6-4074-4aab-9413-a8eb948cd566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-44x2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:04Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:05 crc kubenswrapper[4865]: I0126 16:55:05.159515 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e58c71a-f1f1-421b-8e21-53ef9b51b937\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c1c8845e13a8d4b226f13897c388c5e07b2e9442c182930b42a88b2ed7b299b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a953b0ed4474ae87ed2f4658deb39951e319c1675872e49ea28b634d3c37b8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1160d459f3a91359793f3431bd62257ad71f9446c935f43b5265443946d3e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:05Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:05 crc kubenswrapper[4865]: I0126 16:55:05.185520 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p7qpc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"000aa434-a8c8-4051-88f4-c50d48ce851b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m7gvt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p7qpc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:05Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:05 crc kubenswrapper[4865]: I0126 16:55:05.233431 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bccdd94a-94b4-4a16-95dd-c375a34f754f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fsw2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:05Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:05 crc kubenswrapper[4865]: I0126 16:55:05.233869 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Jan 26 16:55:05 crc kubenswrapper[4865]: I0126 16:55:05.238713 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-17 17:24:31.078799823 +0000 UTC Jan 26 16:55:05 crc 
kubenswrapper[4865]: I0126 16:55:05.252726 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:05Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:05 crc kubenswrapper[4865]: I0126 16:55:05.273375 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:05Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:05 crc kubenswrapper[4865]: I0126 16:55:05.290124 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:05Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:05 crc kubenswrapper[4865]: I0126 16:55:05.302029 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ddedab5-2528-4881-9251-9ba5334aea61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q8cb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:05Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:05 crc kubenswrapper[4865]: I0126 16:55:05.312152 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Jan 26 16:55:05 crc kubenswrapper[4865]: I0126 16:55:05.316498 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bz29j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with 
unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-79bqr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bz29j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:05Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:05 crc kubenswrapper[4865]: I0126 16:55:05.332341 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:05Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:05 crc kubenswrapper[4865]: I0126 16:55:05.349092 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44c06740a3b924e4b54afda160e7e790e2bec67ca8852b703887591802f029af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:05Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:05 crc kubenswrapper[4865]: I0126 16:55:05.364919 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:05Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:05 crc kubenswrapper[4865]: I0126 16:55:05.380285 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:05Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:05 crc kubenswrapper[4865]: I0126 16:55:05.397659 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c0135f6-4074-4aab-9413-a8eb948cd566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-44x2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:05Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:05 crc kubenswrapper[4865]: 
I0126 16:55:05.423245 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e58c71a-f1f1-421b-8e21-53ef9b51b937\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c1c8845e13a8d4b226f13897c388c5e07b2e9442c182930b42a88b2ed7b299b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a953b0ed4474ae87ed2f4658deb39951e319c1675872e49ea28b634d3c37b8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}
,{\\\"containerID\\\":\\\"cri-o://c1160d459f3a91359793f3431bd62257ad71f9446c935f43b5265443946d3e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:05Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:05 crc kubenswrapper[4865]: I0126 16:55:05.433764 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p7qpc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"000aa434-a8c8-4051-88f4-c50d48ce851b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4d967fccbef000c343246b0f7d1cbdd98592d6dc499d444472c20588a6e6697\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m7gvt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p7qpc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:05Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:05 crc kubenswrapper[4865]: I0126 16:55:05.448434 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bccdd94a-94b4-4a16-95dd-c375a34f754f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fsw2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:05Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:05 crc kubenswrapper[4865]: I0126 16:55:05.920742 4865 generic.go:334] "Generic (PLEG): container finished" podID="bccdd94a-94b4-4a16-95dd-c375a34f754f" containerID="ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a" exitCode=0 Jan 26 16:55:05 crc kubenswrapper[4865]: I0126 16:55:05.920813 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" 
event={"ID":"bccdd94a-94b4-4a16-95dd-c375a34f754f","Type":"ContainerDied","Data":"ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a"} Jan 26 16:55:05 crc kubenswrapper[4865]: I0126 16:55:05.924136 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" event={"ID":"0ddedab5-2528-4881-9251-9ba5334aea61","Type":"ContainerStarted","Data":"b9a5f5b650bd1b1e2875d8767634d7ac74fd1c90ea76f84e65f5e5078828be62"} Jan 26 16:55:05 crc kubenswrapper[4865]: I0126 16:55:05.925891 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"4b2cd44e037e42e82e40d3b3a2bc4e00c18804301aa567cf5bf5c7c25ef536be"} Jan 26 16:55:05 crc kubenswrapper[4865]: I0126 16:55:05.927176 4865 generic.go:334] "Generic (PLEG): container finished" podID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerID="81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518" exitCode=0 Jan 26 16:55:05 crc kubenswrapper[4865]: I0126 16:55:05.927962 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" event={"ID":"0c0135f6-4074-4aab-9413-a8eb948cd566","Type":"ContainerDied","Data":"81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518"} Jan 26 16:55:05 crc kubenswrapper[4865]: I0126 16:55:05.957279 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c0135f6-4074-4aab-9413-a8eb948cd566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-44x2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:05Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:05 crc kubenswrapper[4865]: I0126 16:55:05.972880 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:05Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:05 crc kubenswrapper[4865]: I0126 16:55:05.988546 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44c06740a3b924e4b54afda160e7e790e2bec67ca8852b703887591802f029af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:05Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.021204 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:55:06 crc kubenswrapper[4865]: E0126 16:55:06.021591 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:55:10.021485135 +0000 UTC m=+37.605370732 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.036134 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.059384 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.077766 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e58c71a-f1f1-421b-8e21-53ef9b51b937\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c1c8845e13a8d4b226f13897c388c5e07b2e9442c182930b42a88b2ed7b299b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a953b0ed4474ae87ed2f4658deb39951e319c1675872e49ea28b634d3c37b8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1160d459f3a91359793f3431bd62257ad71f9446c935f43b5265443946d3e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.091018 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p7qpc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"000aa434-a8c8-4051-88f4-c50d48ce851b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4d967fccbef000c343246b0f7d1cbdd98592d6dc499d444472c20588a6e6697\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m7gvt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p7qpc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.104642 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bccdd94a-94b4-4a16-95dd-c375a34f754f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fsw2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:06 crc 
kubenswrapper[4865]: I0126 16:55:06.119347 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.122858 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.122914 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.122938 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: 
\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.122962 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:55:06 crc kubenswrapper[4865]: E0126 16:55:06.123092 4865 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 26 16:55:06 crc kubenswrapper[4865]: E0126 16:55:06.123094 4865 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 26 16:55:06 crc kubenswrapper[4865]: E0126 16:55:06.123132 4865 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 26 16:55:06 crc kubenswrapper[4865]: E0126 16:55:06.123147 4865 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 16:55:06 crc kubenswrapper[4865]: E0126 16:55:06.123150 4865 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 26 16:55:06 crc kubenswrapper[4865]: E0126 16:55:06.123108 4865 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 26 16:55:06 crc kubenswrapper[4865]: E0126 16:55:06.123204 4865 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 16:55:06 crc kubenswrapper[4865]: E0126 16:55:06.123198 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 16:55:10.123184562 +0000 UTC m=+37.707070149 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 26 16:55:06 crc kubenswrapper[4865]: E0126 16:55:06.123250 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. 
No retries permitted until 2026-01-26 16:55:10.123235524 +0000 UTC m=+37.707121111 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 16:55:06 crc kubenswrapper[4865]: E0126 16:55:06.123262 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-26 16:55:10.123256684 +0000 UTC m=+37.707142271 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 16:55:06 crc kubenswrapper[4865]: E0126 16:55:06.123295 4865 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 26 16:55:06 crc kubenswrapper[4865]: E0126 16:55:06.123317 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 16:55:10.123309626 +0000 UTC m=+37.707195213 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.138237 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.154429 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.165903 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ddedab5-2528-4881-9251-9ba5334aea61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q8cb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.178458 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bz29j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-79bqr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bz29j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.191397 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.203774 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.223961 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b2cd44e037e42e82e40d3b3a2bc4e00c18804301aa567cf5bf5c7c25ef536be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.239678 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ddedab5-2528-4881-9251-9ba5334aea61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9a5f5b650bd1b1e2875d8767634d7ac74fd1c90ea76f84e65f5e5078828be62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79930d9ec5ae2071a02e2417508458bd2f1b5693205ae0ad3ee46c5a3a31493b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q8cb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.268692 4865 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-bz29j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82ddf2354d7df028aad27125fddbfccd87910a5407d7d63ff39865dd9cedcaa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-79bqr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-bz29j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.297259 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c0135f6-4074-4aab-9413-a8eb948cd566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-44x2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:06Z 
is after 2025-08-24T17:21:41Z" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.314318 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed 
container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.328909 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44c06740a3b924e4b54afda160e7e790e2bec67ca8852b703887591802f029af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.342741 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-45gx2"] Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.343140 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-45gx2" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.345259 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.345407 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.345961 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.346161 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.346284 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.395778 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-15 23:05:38.36917218 +0000 UTC Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.399218 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.399259 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.399255 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:55:06 crc kubenswrapper[4865]: E0126 16:55:06.399399 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:55:06 crc kubenswrapper[4865]: E0126 16:55:06.399481 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:55:06 crc kubenswrapper[4865]: E0126 16:55:06.399563 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.365327 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.420716 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e58c71a-f1f1-421b-8e21-53ef9b51b937\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c1c8845e13a8d4b226f13897c388c5e07b2e9442c182930b42a88b2ed7b299b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a953b0ed4474ae87ed2f4658deb39951e319c1675872e49ea28b634d3c37b8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1160d459f3a91359793f3431bd62257ad71f9446c935f43b5265443946d3e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.425197 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/f950c5c3-b446-44cf-9de5-dd7ff03b615f-serviceca\") pod \"node-ca-45gx2\" (UID: \"f950c5c3-b446-44cf-9de5-dd7ff03b615f\") " pod="openshift-image-registry/node-ca-45gx2" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.425281 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lwt2v\" (UniqueName: \"kubernetes.io/projected/f950c5c3-b446-44cf-9de5-dd7ff03b615f-kube-api-access-lwt2v\") pod \"node-ca-45gx2\" (UID: \"f950c5c3-b446-44cf-9de5-dd7ff03b615f\") " pod="openshift-image-registry/node-ca-45gx2" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.425328 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f950c5c3-b446-44cf-9de5-dd7ff03b615f-host\") pod \"node-ca-45gx2\" (UID: \"f950c5c3-b446-44cf-9de5-dd7ff03b615f\") " pod="openshift-image-registry/node-ca-45gx2" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.434516 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p7qpc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"000aa434-a8c8-4051-88f4-c50d48ce851b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4d967fccbef000c343246b0f7d1cbdd98592d6dc499d444472c20588a6e6697\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m7gvt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p7qpc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.493338 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bccdd94a-94b4-4a16-95dd-c375a34f754f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\
\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"po
dIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fsw2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.521945 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.527101 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f950c5c3-b446-44cf-9de5-dd7ff03b615f-host\") pod \"node-ca-45gx2\" (UID: \"f950c5c3-b446-44cf-9de5-dd7ff03b615f\") " pod="openshift-image-registry/node-ca-45gx2" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.527272 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/f950c5c3-b446-44cf-9de5-dd7ff03b615f-serviceca\") pod \"node-ca-45gx2\" (UID: \"f950c5c3-b446-44cf-9de5-dd7ff03b615f\") " pod="openshift-image-registry/node-ca-45gx2" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.527198 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f950c5c3-b446-44cf-9de5-dd7ff03b615f-host\") pod \"node-ca-45gx2\" (UID: \"f950c5c3-b446-44cf-9de5-dd7ff03b615f\") " pod="openshift-image-registry/node-ca-45gx2" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.527380 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lwt2v\" (UniqueName: \"kubernetes.io/projected/f950c5c3-b446-44cf-9de5-dd7ff03b615f-kube-api-access-lwt2v\") pod \"node-ca-45gx2\" (UID: \"f950c5c3-b446-44cf-9de5-dd7ff03b615f\") " pod="openshift-image-registry/node-ca-45gx2" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.528422 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/f950c5c3-b446-44cf-9de5-dd7ff03b615f-serviceca\") pod \"node-ca-45gx2\" (UID: \"f950c5c3-b446-44cf-9de5-dd7ff03b615f\") " pod="openshift-image-registry/node-ca-45gx2" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.533077 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-45gx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f950c5c3-b446-44cf-9de5-dd7ff03b615f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:06Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:06Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwt2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-45gx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.550872 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.557455 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lwt2v\" (UniqueName: \"kubernetes.io/projected/f950c5c3-b446-44cf-9de5-dd7ff03b615f-kube-api-access-lwt2v\") pod \"node-ca-45gx2\" (UID: \"f950c5c3-b446-44cf-9de5-dd7ff03b615f\") " pod="openshift-image-registry/node-ca-45gx2" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.561410 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b2cd44e037e42e82e40d3b3a2bc4e00c18804301aa567cf5bf5c7c25ef536be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.579405 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ddedab5-2528-4881-9251-9ba5334aea61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9a5f5b650bd1b1e2875d8767634d7ac74fd1c90ea76f84e65f5e5078828be62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79930d9ec5ae2071a02e2417508458bd2f1b5693205ae0ad3ee46c5a3a31493b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q8cb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.593568 4865 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-bz29j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82ddf2354d7df028aad27125fddbfccd87910a5407d7d63ff39865dd9cedcaa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-79bqr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-bz29j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.624085 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc27
6e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.657223 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44c06740a3b924e4b54afda160e7e790e2bec67ca8852b703887591802f029af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.685821 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.704280 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.730843 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c0135f6-4074-4aab-9413-a8eb948cd566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-44x2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.743854 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e58c71a-f1f1-421b-8e21-53ef9b51b937\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c1c8845e13a8d4b226f13897c388c5e07b2e9442c182930b42a88b2ed7b299b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a953b0ed4474ae87ed2f4658deb39951e319c1675872e49ea28b634d3c37b8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"las
tState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1160d459f3a91359793f3431bd62257ad71f9446c935f43b5265443946d3e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.755069 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p7qpc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"000aa434-a8c8-4051-88f4-c50d48ce851b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4d967fccbef000c343246b0f7d1cbdd98592d6dc499d444472c20588a6e6697\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m7gvt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p7qpc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.767725 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bccdd94a-94b4-4a16-95dd-c375a34f754f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\
\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"po
dIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fsw2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.810149 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-45gx2" Jan 26 16:55:06 crc kubenswrapper[4865]: W0126 16:55:06.824764 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf950c5c3_b446_44cf_9de5_dd7ff03b615f.slice/crio-e58e15f7dfb4d80571386efd09ec36298120e0246767d601d04fff01755a5f6d WatchSource:0}: Error finding container e58e15f7dfb4d80571386efd09ec36298120e0246767d601d04fff01755a5f6d: Status 404 returned error can't find the container with id e58e15f7dfb4d80571386efd09ec36298120e0246767d601d04fff01755a5f6d Jan 26 16:55:06 crc kubenswrapper[4865]: I0126 16:55:06.931285 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-45gx2" event={"ID":"f950c5c3-b446-44cf-9de5-dd7ff03b615f","Type":"ContainerStarted","Data":"e58e15f7dfb4d80571386efd09ec36298120e0246767d601d04fff01755a5f6d"} Jan 26 16:55:07 crc kubenswrapper[4865]: I0126 16:55:07.395978 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-02 04:31:36.507277527 +0000 UTC Jan 26 16:55:07 crc kubenswrapper[4865]: I0126 16:55:07.943500 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-45gx2" event={"ID":"f950c5c3-b446-44cf-9de5-dd7ff03b615f","Type":"ContainerStarted","Data":"10c92950abd6cddc41b262bd7e06b74fa47a01c012edd0bec0f5260b992ee70b"} Jan 26 16:55:07 crc kubenswrapper[4865]: I0126 16:55:07.946278 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" event={"ID":"bccdd94a-94b4-4a16-95dd-c375a34f754f","Type":"ContainerStarted","Data":"8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2"} Jan 26 16:55:07 crc kubenswrapper[4865]: I0126 16:55:07.948605 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" event={"ID":"0c0135f6-4074-4aab-9413-a8eb948cd566","Type":"ContainerStarted","Data":"7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d"} Jan 26 16:55:07 crc kubenswrapper[4865]: I0126 16:55:07.959309 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:07Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:07 crc kubenswrapper[4865]: I0126 16:55:07.970322 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b2cd44e037e42e82e40d3b3a2bc4e00c18804301aa567cf5bf5c7c25ef536be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:07Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:07 crc kubenswrapper[4865]: I0126 16:55:07.980659 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ddedab5-2528-4881-9251-9ba5334aea61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9a5f5b650bd1b1e2875d8767634d7ac74fd1c90ea76f84e65f5e5078828be62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79930d9ec5ae2071a02e2417508458bd2f1b5693205ae0ad3ee46c5a3a31493b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q8cb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:07Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:07 crc kubenswrapper[4865]: I0126 16:55:07.992598 4865 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-bz29j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82ddf2354d7df028aad27125fddbfccd87910a5407d7d63ff39865dd9cedcaa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-79bqr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-bz29j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:07Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.005785 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc27
6e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:08Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.017882 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44c06740a3b924e4b54afda160e7e790e2bec67ca8852b703887591802f029af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:08Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.029583 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:08Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.041676 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:08Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.081842 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c0135f6-4074-4aab-9413-a8eb948cd566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-44x2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:08Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.098559 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e58c71a-f1f1-421b-8e21-53ef9b51b937\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c1c8845e13a8d4b226f13897c388c5e07b2e9442c182930b42a88b2ed7b299b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a953b0ed4474ae87ed2f4658deb39951e319c1675872e49ea28b634d3c37b8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"las
tState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1160d459f3a91359793f3431bd62257ad71f9446c935f43b5265443946d3e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:08Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.110346 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p7qpc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"000aa434-a8c8-4051-88f4-c50d48ce851b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4d967fccbef000c343246b0f7d1cbdd98592d6dc499d444472c20588a6e6697\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m7gvt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p7qpc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:08Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.131261 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bccdd94a-94b4-4a16-95dd-c375a34f754f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\
\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"po
dIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fsw2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:08Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.171696 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:08Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.200021 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-45gx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f950c5c3-b446-44cf-9de5-dd7ff03b615f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10c92950abd6cddc41b262bd7e06b74fa47a01c012edd0bec0f5260b992ee70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwt2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-45gx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:08Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.217108 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},
\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:08Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.240873 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44c06740a3b924e4b54afda160e7e790e2bec67ca8852b703887591802f029af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:08Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.255270 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:08Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.269276 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:08Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.292222 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c0135f6-4074-4aab-9413-a8eb948cd566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-44x2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:08Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.307850 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e58c71a-f1f1-421b-8e21-53ef9b51b937\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c1c8845e13a8d4b226f13897c388c5e07b2e9442c182930b42a88b2ed7b299b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a953b0ed4474ae87ed2f4658deb39951e319c1675872e49ea28b634d3c37b8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"las
tState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1160d459f3a91359793f3431bd62257ad71f9446c935f43b5265443946d3e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:08Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.320390 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p7qpc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"000aa434-a8c8-4051-88f4-c50d48ce851b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4d967fccbef000c343246b0f7d1cbdd98592d6dc499d444472c20588a6e6697\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m7gvt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p7qpc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:08Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.337936 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bccdd94a-94b4-4a16-95dd-c375a34f754f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\
\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\
\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fsw2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:08Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.351602 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:08Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.359630 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.359764 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:55:08 crc kubenswrapper[4865]: E0126 16:55:08.359870 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.359790 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:55:08 crc kubenswrapper[4865]: E0126 16:55:08.360047 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:55:08 crc kubenswrapper[4865]: E0126 16:55:08.360236 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.364018 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-45gx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f950c5c3-b446-44cf-9de5-dd7ff03b615f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10c92950abd6cddc41b262bd7e06b74fa47a01c012edd0bec0f5260b992ee70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwt2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-45gx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:08Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.378116 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:08Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.391489 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b2cd44e037e42e82e40d3b3a2bc4e00c18804301aa567cf5bf5c7c25ef536be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:08Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.397049 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-20 17:54:19.643141571 +0000 UTC Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.402925 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ddedab5-2528-4881-9251-9ba5334aea61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9a5f5b650bd1b1e2875d8767634d7ac74fd1c90ea76f84e65f5e5078828be62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79930d9ec5ae2071a02e2417508458bd2f1b5693205ae0ad3ee46c5a3a31493b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q8cb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:08Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.420145 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bz29j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82ddf2354d7df028aad27125fddbfccd87910a5407d7d63ff39865dd9cedcaa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-79bqr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bz29j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:08Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.634543 4865 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.639223 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.639271 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.639285 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.639503 4865 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.655088 4865 kubelet_node_status.go:115] "Node was previously registered" node="crc" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.655565 4865 kubelet_node_status.go:79] "Successfully registered node" node="crc" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.658178 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.658218 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.658230 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.658253 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.658266 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:08Z","lastTransitionTime":"2026-01-26T16:55:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:08 crc kubenswrapper[4865]: E0126 16:55:08.677640 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ac1a731-046d-4d8f-8161-6e9e491f5dac\\\",\\\"systemUUID\\\":\\\"35d9e2a2-68c2-48cb-856f-00ba3eb74617\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:08Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.681792 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.681834 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.681848 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.681870 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.681882 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:08Z","lastTransitionTime":"2026-01-26T16:55:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:08 crc kubenswrapper[4865]: E0126 16:55:08.696521 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ac1a731-046d-4d8f-8161-6e9e491f5dac\\\",\\\"systemUUID\\\":\\\"35d9e2a2-68c2-48cb-856f-00ba3eb74617\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:08Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.700011 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.700055 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.700066 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.700081 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.700090 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:08Z","lastTransitionTime":"2026-01-26T16:55:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:08 crc kubenswrapper[4865]: E0126 16:55:08.711935 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ac1a731-046d-4d8f-8161-6e9e491f5dac\\\",\\\"systemUUID\\\":\\\"35d9e2a2-68c2-48cb-856f-00ba3eb74617\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:08Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.716749 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.716811 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.716823 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.716845 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.716858 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:08Z","lastTransitionTime":"2026-01-26T16:55:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:08 crc kubenswrapper[4865]: E0126 16:55:08.734242 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ac1a731-046d-4d8f-8161-6e9e491f5dac\\\",\\\"systemUUID\\\":\\\"35d9e2a2-68c2-48cb-856f-00ba3eb74617\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:08Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.741053 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.741116 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.741130 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.741155 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.741171 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:08Z","lastTransitionTime":"2026-01-26T16:55:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:08 crc kubenswrapper[4865]: E0126 16:55:08.756896 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ac1a731-046d-4d8f-8161-6e9e491f5dac\\\",\\\"systemUUID\\\":\\\"35d9e2a2-68c2-48cb-856f-00ba3eb74617\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:08Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:08 crc kubenswrapper[4865]: E0126 16:55:08.757057 4865 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.758937 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.758976 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.758988 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.759046 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.759063 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:08Z","lastTransitionTime":"2026-01-26T16:55:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.861862 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.861918 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.861934 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.861953 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.861965 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:08Z","lastTransitionTime":"2026-01-26T16:55:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.954011 4865 generic.go:334] "Generic (PLEG): container finished" podID="bccdd94a-94b4-4a16-95dd-c375a34f754f" containerID="8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2" exitCode=0 Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.954110 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" event={"ID":"bccdd94a-94b4-4a16-95dd-c375a34f754f","Type":"ContainerDied","Data":"8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2"} Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.958335 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" event={"ID":"0c0135f6-4074-4aab-9413-a8eb948cd566","Type":"ContainerStarted","Data":"a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538"} Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.958410 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" event={"ID":"0c0135f6-4074-4aab-9413-a8eb948cd566","Type":"ContainerStarted","Data":"7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c"} Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.958428 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" event={"ID":"0c0135f6-4074-4aab-9413-a8eb948cd566","Type":"ContainerStarted","Data":"4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3"} Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.958447 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" event={"ID":"0c0135f6-4074-4aab-9413-a8eb948cd566","Type":"ContainerStarted","Data":"d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58"} Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.958464 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" event={"ID":"0c0135f6-4074-4aab-9413-a8eb948cd566","Type":"ContainerStarted","Data":"80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405"} Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.964085 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.964128 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.964137 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.964151 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.964162 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:08Z","lastTransitionTime":"2026-01-26T16:55:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.972341 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p7qpc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"000aa434-a8c8-4051-88f4-c50d48ce851b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4d967fccbef000c343246b0f7d1cbdd98592d6dc499d444472c20588a6e6697\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m7gvt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p7qpc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:08Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:08 crc kubenswrapper[4865]: I0126 16:55:08.988582 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bccdd94a-94b4-4a16-95dd-c375a34f754f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\
":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,
\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fsw2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:08Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.003129 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e58c71a-f1f1-421b-8e21-53ef9b51b937\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c1c8845e13a8d4b226f13897c388c5e07b2e9442c182930b42a88b2ed7b299b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mou
ntPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a953b0ed4474ae87ed2f4658deb39951e319c1675872e49ea28b634d3c37b8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1160d459f3a91359793f3431bd62257ad71f9446c935f43b5265443946d3e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:09Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.026253 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:09Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.042134 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-45gx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f950c5c3-b446-44cf-9de5-dd7ff03b615f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10c92950abd6cddc41b262bd7e06b74fa47a01c012edd0bec0f5260b992ee70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwt2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126
.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-45gx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:09Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.159279 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b2cd44e037e42e82e40d3b3a2bc4e00c18804301aa567cf5bf5c7c25ef536be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:09Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.169105 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.169143 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.169153 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.169262 4865 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeNotReady" Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.169290 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:09Z","lastTransitionTime":"2026-01-26T16:55:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.177144 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ddedab5-2528-4881-9251-9ba5334aea61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9a5f5b650bd1b1e2875d8767634d7ac74fd1c90ea76f84e65f5e5078828be62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79930d9ec5ae2071a02e2417508458bd2f1b5693205ae0ad3ee46c5a3a31493b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"ho
stIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q8cb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:09Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.193773 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bz29j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82ddf2354d7df028aad27125fddbfccd87910a5407d7d63ff39865dd9cedcaa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\
\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-79bqr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bz29j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:09Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.209184 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:09Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.226253 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44c06740a3b924e4b54afda160e7e790e2bec67ca8852b703887591802f029af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:09Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.253981 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:09Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.276930 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.276971 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.276982 4865 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.277023 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.277036 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:09Z","lastTransitionTime":"2026-01-26T16:55:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.311240 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:09Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.339593 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c0135f6-4074-4aab-9413-a8eb948cd566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-44x2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:09Z 
is after 2025-08-24T17:21:41Z" Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.355375 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed 
container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:09Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.379594 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.379664 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.379677 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.379716 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.379733 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:09Z","lastTransitionTime":"2026-01-26T16:55:09Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.398054 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-08 12:36:49.5676912 +0000 UTC Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.483069 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.483120 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.483132 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.483149 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.483161 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:09Z","lastTransitionTime":"2026-01-26T16:55:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.586403 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.586465 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.586483 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.586509 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.586528 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:09Z","lastTransitionTime":"2026-01-26T16:55:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.689964 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.690069 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.690089 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.690114 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.690136 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:09Z","lastTransitionTime":"2026-01-26T16:55:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.793663 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.793711 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.793723 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.793739 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.793748 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:09Z","lastTransitionTime":"2026-01-26T16:55:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.896849 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.896915 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.896932 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.896957 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.897028 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:09Z","lastTransitionTime":"2026-01-26T16:55:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.965292 4865 generic.go:334] "Generic (PLEG): container finished" podID="bccdd94a-94b4-4a16-95dd-c375a34f754f" containerID="7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081" exitCode=0 Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.965352 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" event={"ID":"bccdd94a-94b4-4a16-95dd-c375a34f754f","Type":"ContainerDied","Data":"7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081"} Jan 26 16:55:09 crc kubenswrapper[4865]: I0126 16:55:09.990368 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e58c71a-f1f1-421b-8e21-53ef9b51b937\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c1c8845e13a8d4b226f13897c388c5e07b2e9442c182930b42a88b2ed7b299b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a953b0ed4474ae87ed2f4658deb39951e319c1675872e49ea28b634d3c37b8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a57
8bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1160d459f3a91359793f3431bd62257ad71f9446c935f43b5265443946d3e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:09Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.000075 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.000109 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.000123 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.000139 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.000152 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:10Z","lastTransitionTime":"2026-01-26T16:55:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.007776 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p7qpc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"000aa434-a8c8-4051-88f4-c50d48ce851b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4d967fccbef000c343246b0f7d1cbdd98592d6dc499d444472c20588a6e6697\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m7gvt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p7qpc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:10Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.028091 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bccdd94a-94b4-4a16-95dd-c375a34f754f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26
T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\
\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fsw2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:10Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.044975 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:10Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.059952 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-45gx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f950c5c3-b446-44cf-9de5-dd7ff03b615f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10c92950abd6cddc41b262bd7e06b74fa47a01c012edd0bec0f5260b992ee70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwt2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-45gx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:10Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.060657 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:55:10 crc kubenswrapper[4865]: E0126 16:55:10.061118 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:55:18.061085594 +0000 UTC m=+45.644971181 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.072655 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:10Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.085214 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b2cd44e037e42e82e40d3b3a2bc4e00c18804301aa567cf5bf5c7c25ef536be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:10Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.097828 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ddedab5-2528-4881-9251-9ba5334aea61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9a5f5b650bd1b1e2875d8767634d7ac74fd1c90ea76f84e65f5e5078828be62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79930d9ec5ae2071a02e2417508458bd2f1b5693205ae0ad3ee46c5a3a31493b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q8cb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:10Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.102389 4865 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.102419 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.102429 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.102444 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.102454 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:10Z","lastTransitionTime":"2026-01-26T16:55:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.111054 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bz29j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82ddf2354d7df028aad27125fddbfccd87910a5407d7d63ff39865dd9cedcaa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin
\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-79bqr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bz29j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:10Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.127660 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c0135f6-4074-4aab-9413-a8eb948cd566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-44x2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:10Z 
is after 2025-08-24T17:21:41Z" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.140850 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\
\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed 
container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:10Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.160538 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44c06740a3b924e4b54afda160e7e790e2bec67ca8852b703887591802f029af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:10Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.161234 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.161291 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.161320 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.161350 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:55:10 crc kubenswrapper[4865]: E0126 16:55:10.161469 4865 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 26 16:55:10 crc kubenswrapper[4865]: E0126 16:55:10.161510 4865 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 26 16:55:10 crc kubenswrapper[4865]: E0126 16:55:10.161558 4865 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 26 16:55:10 crc kubenswrapper[4865]: E0126 16:55:10.161556 4865 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 26 16:55:10 crc kubenswrapper[4865]: E0126 16:55:10.161575 4865 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 16:55:10 crc kubenswrapper[4865]: E0126 16:55:10.161573 4865 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 26 16:55:10 crc kubenswrapper[4865]: E0126 16:55:10.161621 4865 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 26 16:55:10 crc kubenswrapper[4865]: E0126 16:55:10.161534 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 16:55:18.161515376 +0000 UTC m=+45.745400963 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 26 16:55:10 crc kubenswrapper[4865]: E0126 16:55:10.161643 4865 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 16:55:10 crc kubenswrapper[4865]: E0126 16:55:10.161703 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. 
No retries permitted until 2026-01-26 16:55:18.16165702 +0000 UTC m=+45.745542637 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 16:55:10 crc kubenswrapper[4865]: E0126 16:55:10.161733 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 16:55:18.161718022 +0000 UTC m=+45.745603709 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 26 16:55:10 crc kubenswrapper[4865]: E0126 16:55:10.161756 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-26 16:55:18.161745873 +0000 UTC m=+45.745631570 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.177982 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:10Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.191300 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:10Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.205240 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.205278 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.205290 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.205321 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.205336 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:10Z","lastTransitionTime":"2026-01-26T16:55:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.308440 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.308502 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.308511 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.308524 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.308534 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:10Z","lastTransitionTime":"2026-01-26T16:55:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.357261 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.357414 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.358213 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:55:10 crc kubenswrapper[4865]: E0126 16:55:10.358394 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:55:10 crc kubenswrapper[4865]: E0126 16:55:10.358455 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:55:10 crc kubenswrapper[4865]: E0126 16:55:10.358978 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.399092 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-19 13:56:21.298993795 +0000 UTC Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.411284 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.411324 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.411338 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.411358 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.411370 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:10Z","lastTransitionTime":"2026-01-26T16:55:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.514107 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.514152 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.514164 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.514181 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.514194 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:10Z","lastTransitionTime":"2026-01-26T16:55:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.617482 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.617575 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.617594 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.617618 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.617631 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:10Z","lastTransitionTime":"2026-01-26T16:55:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.720116 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.720162 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.720174 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.720190 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.720200 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:10Z","lastTransitionTime":"2026-01-26T16:55:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.822911 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.822951 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.822963 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.822980 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.823014 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:10Z","lastTransitionTime":"2026-01-26T16:55:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.925474 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.925531 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.925545 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.925566 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.925578 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:10Z","lastTransitionTime":"2026-01-26T16:55:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:10 crc kubenswrapper[4865]: I0126 16:55:10.971815 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" event={"ID":"bccdd94a-94b4-4a16-95dd-c375a34f754f","Type":"ContainerStarted","Data":"603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5"} Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.028640 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.028717 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.028727 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.028743 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.028755 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:11Z","lastTransitionTime":"2026-01-26T16:55:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.131817 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.131873 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.131891 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.131916 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.131936 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:11Z","lastTransitionTime":"2026-01-26T16:55:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.235648 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.235692 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.235705 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.235722 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.235734 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:11Z","lastTransitionTime":"2026-01-26T16:55:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.338208 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.338263 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.338275 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.338290 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.338302 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:11Z","lastTransitionTime":"2026-01-26T16:55:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.399657 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-07 06:11:20.567886659 +0000 UTC Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.441157 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.441219 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.441233 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.441257 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.441272 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:11Z","lastTransitionTime":"2026-01-26T16:55:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.544473 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.544545 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.544558 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.544601 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.544615 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:11Z","lastTransitionTime":"2026-01-26T16:55:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.647751 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.647798 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.647810 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.647830 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.647845 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:11Z","lastTransitionTime":"2026-01-26T16:55:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.751034 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.751072 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.751082 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.751097 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.751108 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:11Z","lastTransitionTime":"2026-01-26T16:55:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.855654 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.855712 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.855726 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.855743 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.855757 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:11Z","lastTransitionTime":"2026-01-26T16:55:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.958801 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.958837 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.958846 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.958859 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:11 crc kubenswrapper[4865]: I0126 16:55:11.958868 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:11Z","lastTransitionTime":"2026-01-26T16:55:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.061591 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.061652 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.061664 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.061683 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.061696 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:12Z","lastTransitionTime":"2026-01-26T16:55:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.164540 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.164594 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.164603 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.164617 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.164627 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:12Z","lastTransitionTime":"2026-01-26T16:55:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.266529 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.266568 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.266577 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.266590 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.266601 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:12Z","lastTransitionTime":"2026-01-26T16:55:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.357788 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.357851 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:55:12 crc kubenswrapper[4865]: E0126 16:55:12.357933 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.357814 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:55:12 crc kubenswrapper[4865]: E0126 16:55:12.358096 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:55:12 crc kubenswrapper[4865]: E0126 16:55:12.358204 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.369690 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.369750 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.369763 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.369781 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.369793 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:12Z","lastTransitionTime":"2026-01-26T16:55:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.399830 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 13:14:01.770236204 +0000 UTC Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.472186 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.472241 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.472252 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.472272 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.472287 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:12Z","lastTransitionTime":"2026-01-26T16:55:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.577108 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.577146 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.577156 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.577173 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.577186 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:12Z","lastTransitionTime":"2026-01-26T16:55:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.679841 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.679932 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.679947 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.679965 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.679976 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:12Z","lastTransitionTime":"2026-01-26T16:55:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.782193 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.782249 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.782264 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.782283 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.782298 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:12Z","lastTransitionTime":"2026-01-26T16:55:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.884237 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.884265 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.884275 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.884288 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.884298 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:12Z","lastTransitionTime":"2026-01-26T16:55:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.980232 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" event={"ID":"0c0135f6-4074-4aab-9413-a8eb948cd566","Type":"ContainerStarted","Data":"22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58"} Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.984969 4865 generic.go:334] "Generic (PLEG): container finished" podID="bccdd94a-94b4-4a16-95dd-c375a34f754f" containerID="603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5" exitCode=0 Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.985051 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" event={"ID":"bccdd94a-94b4-4a16-95dd-c375a34f754f","Type":"ContainerDied","Data":"603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5"} Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.987943 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.987991 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.988019 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.988039 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:12 crc kubenswrapper[4865]: I0126 16:55:12.990080 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:12Z","lastTransitionTime":"2026-01-26T16:55:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.011424 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c0135f6-4074-4aab-9413-a8eb948cd566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d
1e4fae7b52d518\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-44x2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:13Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.028856 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:13Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.045959 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44c06740a3b924e4b54afda160e7e790e2bec67ca8852b703887591802f029af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:13Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.060450 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:13Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.074498 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:13Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.092670 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e58c71a-f1f1-421b-8e21-53ef9b51b937\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c1c8845e13a8d4b226f13897c388c5e07b2e9442c182930b42a88b2ed7b299b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a953b0ed4474ae87ed2f4658deb39951e319c1675872e49ea28b634d3c37b8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1160d459f3a91359793f3431bd62257ad71f9446c935f43b5265443946d3e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:13Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.094653 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.094699 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.094710 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.094727 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.094742 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:13Z","lastTransitionTime":"2026-01-26T16:55:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.111294 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p7qpc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"000aa434-a8c8-4051-88f4-c50d48ce851b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4d967fccbef000c343246b0f7d1cbdd98592d6dc499d444472c20588a6e6697\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m7gvt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p7qpc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:13Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.131775 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bccdd94a-94b4-4a16-95dd-c375a34f754f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\
\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":f
alse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fsw2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:13Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.150218 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:13Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.162363 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-45gx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f950c5c3-b446-44cf-9de5-dd7ff03b615f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10c92950abd6cddc41b262bd7e06b74fa47a01c012edd0bec0f5260b992ee70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwt2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-45gx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:13Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.178296 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:13Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.193527 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b2cd44e037e42e82e40d3b3a2bc4e00c18804301aa567cf5bf5c7c25ef536be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:13Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.197964 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.198046 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.198062 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.198085 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.198097 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:13Z","lastTransitionTime":"2026-01-26T16:55:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.209780 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ddedab5-2528-4881-9251-9ba5334aea61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9a5f5b650bd1b1e2875d8767634d7ac74fd1c90ea76f84e65f5e5078828be62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79930d9ec5ae2071a02e2417508458bd2f1b5693205ae0ad3ee46c5a3a31493b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q8cb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:13Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.225025 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bz29j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82ddf2354d7df028aad27125fddbfccd87910a5407d7d63ff39865dd9cedcaa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-79bqr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bz29j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:13Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.300678 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.300732 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.300742 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.300763 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.300778 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:13Z","lastTransitionTime":"2026-01-26T16:55:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.400936 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-22 10:11:48.937946856 +0000 UTC Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.403821 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.403882 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.403896 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.403919 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.403933 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:13Z","lastTransitionTime":"2026-01-26T16:55:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.507644 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.507702 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.507718 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.507736 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.507749 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:13Z","lastTransitionTime":"2026-01-26T16:55:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.610506 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.610577 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.610590 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.610609 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.610737 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:13Z","lastTransitionTime":"2026-01-26T16:55:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.713238 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.713299 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.713309 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.713332 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.713344 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:13Z","lastTransitionTime":"2026-01-26T16:55:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.816451 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.816523 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.816543 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.816569 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.816591 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:13Z","lastTransitionTime":"2026-01-26T16:55:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.920281 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.920362 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.920379 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.920404 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:13 crc kubenswrapper[4865]: I0126 16:55:13.920430 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:13Z","lastTransitionTime":"2026-01-26T16:55:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.090172 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.090218 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.090228 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.090247 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.090258 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:14Z","lastTransitionTime":"2026-01-26T16:55:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.096730 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" event={"ID":"bccdd94a-94b4-4a16-95dd-c375a34f754f","Type":"ContainerStarted","Data":"23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663"} Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.117631 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p7qpc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"000aa434-a8c8-4051-88f4-c50d48ce851b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4d967fccbef000c343246b0f7d1cbdd98592d6dc499d444472c20588a6e6697\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m7gvt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p7qpc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:14Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.137037 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bccdd94a-94b4-4a16-95dd-c375a34f754f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/
ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\"
:\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fsw2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:14Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.151181 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e58c71a-f1f1-421b-8e21-53ef9b51b937\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c1c8845e13a8d4b226f13897c388c5e07b2e9442c182930b42a88b2ed7b299b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a953b0ed4474ae87ed2f4658deb39951e319c1675872e49ea28b634d3c37b8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1160d459f3a91359793f3431bd62257ad71f9446c935f43b5265443946d3e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:14Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.167986 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:14Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.179103 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-45gx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f950c5c3-b446-44cf-9de5-dd7ff03b615f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10c92950abd6cddc41b262bd7e06b74fa47a01c012edd0bec0f5260b992ee70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwt2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-45gx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:14Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.193417 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b2cd44e037e42e82e40d3b3a2bc4e00c18804301aa567cf5bf5c7c25ef536be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:14Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.193461 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.193600 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.193613 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.193654 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.193670 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:14Z","lastTransitionTime":"2026-01-26T16:55:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.207214 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ddedab5-2528-4881-9251-9ba5334aea61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9a5f5b650bd1b1e2875d8767634d7ac74fd1c90ea76f84e65f5e5078828be62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79930d9ec5ae2071a02e2417508458bd2f1b5693205ae0ad3ee46c5a3a31493b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q8cb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:14Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.221351 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bz29j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82ddf2354d7df028aad27125fddbfccd87910a5407d7d63ff39865dd9cedcaa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-79bqr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bz29j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:14Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.236568 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:14Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.251281 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44c06740a3b924e4b54afda160e7e790e2bec67ca8852b703887591802f029af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:14Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.266117 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:14Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.281828 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:14Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.297576 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.297663 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.297678 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.297710 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.297737 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:14Z","lastTransitionTime":"2026-01-26T16:55:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.305404 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c0135f6-4074-4aab-9413-a8eb948cd566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d
1e4fae7b52d518\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-44x2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:14Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.321940 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:14Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.357184 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.357250 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:55:14 crc kubenswrapper[4865]: E0126 16:55:14.357342 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.357354 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:55:14 crc kubenswrapper[4865]: E0126 16:55:14.357429 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:55:14 crc kubenswrapper[4865]: E0126 16:55:14.357520 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.371884 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:14Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.391669 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c0135f6-4074-4aab-9413-a8eb948cd566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-44x2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:14Z 
is after 2025-08-24T17:21:41Z" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.400424 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.400472 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.400486 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.400504 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.400517 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:14Z","lastTransitionTime":"2026-01-26T16:55:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.401147 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-05 00:11:15.75479788 +0000 UTC Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.406147 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:14Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.420461 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44c06740a3b924e4b54afda160e7e790e2bec67ca8852b703887591802f029af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:14Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.435914 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:14Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.447964 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e58c71a-f1f1-421b-8e21-53ef9b51b937\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c1c8845e13a8d4b226f13897c388c5e07b2e9442c182930b42a88b2ed7b299b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a953b0ed4474ae87ed2f4658deb39951e319c1675872e49ea28b634d3c37b8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1160d459f3a91359793f3431bd62257ad71f9446c935f43b5265443946d3e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:14Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.458373 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p7qpc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"000aa434-a8c8-4051-88f4-c50d48ce851b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4d967fccbef000c343246b0f7d1cbdd98592d6dc499d444472c20588a6e6697\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m7gvt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p7qpc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:14Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.471618 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bccdd94a-94b4-4a16-95dd-c375a34f754f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fsw2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:14Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.483784 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:14Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.495132 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-45gx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f950c5c3-b446-44cf-9de5-dd7ff03b615f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10c92950abd6cddc41b262bd7e06b74fa47a01c012edd0bec0f5260b992ee70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwt2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-45gx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:14Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.503438 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.503492 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.503506 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.503527 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.503542 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:14Z","lastTransitionTime":"2026-01-26T16:55:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.507484 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bz29j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82ddf2354d7df028aad27125fddbfccd87910a5407d7d63ff39865dd9cedcaa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\
"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-79bqr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bz29j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:14Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.520376 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:14Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.531758 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b2cd44e037e42e82e40d3b3a2bc4e00c18804301aa567cf5bf5c7c25ef536be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:14Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.543510 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ddedab5-2528-4881-9251-9ba5334aea61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9a5f5b650bd1b1e2875d8767634d7ac74fd1c90ea76f84e65f5e5078828be62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79930d9ec5ae2071a02e2417508458bd2f1b5693205ae0ad3ee46c5a3a31493b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q8cb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:14Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.605813 4865 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.605866 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.605879 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.605900 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.605916 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:14Z","lastTransitionTime":"2026-01-26T16:55:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.757285 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.757760 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.757773 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.757794 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.757808 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:14Z","lastTransitionTime":"2026-01-26T16:55:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.861201 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.861249 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.861748 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.861786 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.861803 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:14Z","lastTransitionTime":"2026-01-26T16:55:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.965472 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.965557 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.965582 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.965611 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:14 crc kubenswrapper[4865]: I0126 16:55:14.965631 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:14Z","lastTransitionTime":"2026-01-26T16:55:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.067841 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.067884 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.067896 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.067913 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.067925 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:15Z","lastTransitionTime":"2026-01-26T16:55:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.170556 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.170613 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.170635 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.170661 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.170682 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:15Z","lastTransitionTime":"2026-01-26T16:55:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.274501 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.274614 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.274633 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.274728 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.274771 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:15Z","lastTransitionTime":"2026-01-26T16:55:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.358491 4865 scope.go:117] "RemoveContainer" containerID="6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604" Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.376948 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.376981 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.377007 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.377024 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.377036 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:15Z","lastTransitionTime":"2026-01-26T16:55:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.401421 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-01 12:38:17.132122332 +0000 UTC Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.478742 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.478777 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.478789 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.478808 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.478821 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:15Z","lastTransitionTime":"2026-01-26T16:55:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.581774 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.581810 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.581819 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.581861 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.581873 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:15Z","lastTransitionTime":"2026-01-26T16:55:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.684661 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.684708 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.684719 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.684785 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.684801 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:15Z","lastTransitionTime":"2026-01-26T16:55:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.787864 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.787918 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.787931 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.787952 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.787967 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:15Z","lastTransitionTime":"2026-01-26T16:55:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.890842 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.890881 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.890892 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.890907 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:15 crc kubenswrapper[4865]: I0126 16:55:15.890918 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:15Z","lastTransitionTime":"2026-01-26T16:55:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.022599 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.022670 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.022682 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.022705 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.022720 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:16Z","lastTransitionTime":"2026-01-26T16:55:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.108709 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" event={"ID":"0c0135f6-4074-4aab-9413-a8eb948cd566","Type":"ContainerStarted","Data":"6617fec99a4214fd209e7909fef99f9b57cfe3163afb20fc16be308d253066f9"} Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.109144 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.109185 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.111440 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.113388 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d5610d76387528c6d73e03566f9acf4b744b79f4a39119d35d23b8b2dcb385f6"} Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.113764 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.126681 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e58c71a-f1f1-421b-8e21-53ef9b51b937\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c1c8845e13a8d4b226f13897c388c5e07b2e9442c182930b42a88b2ed7b299b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a953b0ed4474ae87ed2f4658deb39951e319c1675872e49ea28b634d3c37b8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1160d459f3a91359793f3431bd62257ad71f9446c935f43b5265443946d3e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:16Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.127645 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.127694 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.127704 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.127725 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.127737 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:16Z","lastTransitionTime":"2026-01-26T16:55:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.138516 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p7qpc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"000aa434-a8c8-4051-88f4-c50d48ce851b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4d967fccbef000c343246b0f7d1cbdd98592d6dc499d444472c20588a6e6697\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m7gvt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p7qpc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:16Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.157398 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bccdd94a-94b4-4a16-95dd-c375a34f754f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\
\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/
ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fsw2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:16Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.169650 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.171982 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:16Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.183383 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-45gx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f950c5c3-b446-44cf-9de5-dd7ff03b615f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10c92950abd6cddc41b262bd7e06b74fa47a01c012edd0bec0f5260b992ee70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwt2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126
.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-45gx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:16Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.196618 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:16Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.209658 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b2cd44e037e42e82e40d3b3a2bc4e00c18804301aa567cf5bf5c7c25ef536be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:16Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.223244 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ddedab5-2528-4881-9251-9ba5334aea61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9a5f5b650bd1b1e2875d8767634d7ac74fd1c90ea76f84e65f5e5078828be62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79930d9ec5ae2071a02e2417508458bd2f1b5693205ae0ad3ee46c5a3a31493b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q8cb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:16Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.234906 4865 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.234955 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.234979 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.235035 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.235050 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:16Z","lastTransitionTime":"2026-01-26T16:55:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.246928 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bz29j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82ddf2354d7df028aad27125fddbfccd87910a5407d7d63ff39865dd9cedcaa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin
\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-79bqr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bz29j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:16Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.261910 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:16Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.276062 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44c06740a3b924e4b54afda160e7e790e2bec67ca8852b703887591802f029af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:16Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.288844 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:16Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.302205 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:16Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.324350 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c0135f6-4074-4aab-9413-a8eb948cd566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready 
status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c\\\",\\\"image\\\":\\\"quay.io/openshift-
release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6617fec99a4214fd209e7
909fef99f9b57cfe3163afb20fc16be308d253066f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuberne
tes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-44x2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:16Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.338442 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.338502 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.338515 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.338533 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.338546 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:16Z","lastTransitionTime":"2026-01-26T16:55:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.341544 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:16Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.355835 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b2cd44e037e42e82e40d3b3a2bc4e00c18804301aa567cf5bf5c7c25ef536be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:16Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.356907 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:55:16 crc kubenswrapper[4865]: E0126 16:55:16.357092 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.356932 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:55:16 crc kubenswrapper[4865]: E0126 16:55:16.357193 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.356907 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:55:16 crc kubenswrapper[4865]: E0126 16:55:16.357266 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.370732 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ddedab5-2528-4881-9251-9ba5334aea61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9a5f5b650bd1b1e2875d8767634d7ac74fd1c90ea76f84e65f5e5078828be62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79930d9ec5ae2071a02e2417508458bd2f1b5693205ae0ad3ee46c5a3a31493b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\"
:[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q8cb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:16Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.381765 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bz29j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82ddf2354d7df028aad27125fddbfccd87910a5407d7d63ff39865dd9cedcaa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubele
t\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-79bqr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bz29j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:16Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.397147 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5610d76387528c6d73e03566f9acf4b744b79f4a39119d35d23b8b2dcb385f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:16Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.402596 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-07 23:52:45.10927811 +0000 UTC Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.415650 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44c06740a3b924e4b54afda160e7e790e2bec67ca8852b703887591802f029af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:16Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.431776 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:16Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.441836 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.441877 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.441890 4865 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.441909 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.441920 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:16Z","lastTransitionTime":"2026-01-26T16:55:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.446807 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:16Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.470251 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c0135f6-4074-4aab-9413-a8eb948cd566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6617fec99a4214fd209e7909fef99f9b57cfe316
3afb20fc16be308d253066f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-44x2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:16Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.486922 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e58c71a-f1f1-421b-8e21-53ef9b51b937\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c1c8845e13a8d4b226f13897c388c5e07b2e9442c182930b42a88b2ed7b299b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a953b0ed4474ae87ed2f4658deb39951e319c1675872e49ea28b634d3c37b8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1160d459f3a91359793f3431bd62257ad71f9446c935f43b5265443946d3e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:16Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.499031 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p7qpc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"000aa434-a8c8-4051-88f4-c50d48ce851b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4d967fccbef000c343246b0f7d1cbdd98592d6dc499d444472c20588a6e6697\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m7gvt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p7qpc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:16Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.545120 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.545176 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.545189 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.545207 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.545223 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:16Z","lastTransitionTime":"2026-01-26T16:55:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.553222 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bccdd94a-94b4-4a16-95dd-c375a34f754f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fsw2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:16Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.577059 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:16Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.601589 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-45gx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f950c5c3-b446-44cf-9de5-dd7ff03b615f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10c92950abd6cddc41b262bd7e06b74fa47a01c012edd0bec0f5260b992ee70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwt2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-45gx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:16Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.647608 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.647671 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.647682 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.647695 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.647705 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:16Z","lastTransitionTime":"2026-01-26T16:55:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.750150 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.750203 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.750215 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.750235 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.750248 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:16Z","lastTransitionTime":"2026-01-26T16:55:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.853026 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.853070 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.853082 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.853096 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.853107 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:16Z","lastTransitionTime":"2026-01-26T16:55:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.957244 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.957285 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.957294 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.957313 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:16 crc kubenswrapper[4865]: I0126 16:55:16.957325 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:16Z","lastTransitionTime":"2026-01-26T16:55:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.061609 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.061675 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.061686 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.061708 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.061722 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:17Z","lastTransitionTime":"2026-01-26T16:55:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.124875 4865 generic.go:334] "Generic (PLEG): container finished" podID="bccdd94a-94b4-4a16-95dd-c375a34f754f" containerID="23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663" exitCode=0 Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.125026 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" event={"ID":"bccdd94a-94b4-4a16-95dd-c375a34f754f","Type":"ContainerDied","Data":"23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663"} Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.125974 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.143335 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:17Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.163378 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-45gx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f950c5c3-b446-44cf-9de5-dd7ff03b615f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10c92950abd6cddc41b262bd7e06b74fa47a01c012edd0bec0f5260b992ee70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwt2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-45gx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:17Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.165729 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.165834 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.165840 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.165849 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.165910 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.165926 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:17Z","lastTransitionTime":"2026-01-26T16:55:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.185739 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:17Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.208515 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b2cd44e037e42e82e40d3b3a2bc4e00c18804301aa567cf5bf5c7c25ef536be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:17Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.222241 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ddedab5-2528-4881-9251-9ba5334aea61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9a5f5b650bd1b1e2875d8767634d7ac74fd1c90ea76f84e65f5e5078828be62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79930d9ec5ae2071a02e2417508458bd2f1b5693205ae0ad3ee46c5a3a31493b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q8cb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:17Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.238645 4865 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-bz29j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82ddf2354d7df028aad27125fddbfccd87910a5407d7d63ff39865dd9cedcaa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-79bqr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-bz29j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:17Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.257560 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc27
6e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5610d76387528c6d73e03566f9acf4b744b79f4a39119d35d23b8b2dcb385f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:17Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.267713 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.267744 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.267752 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.267765 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.267775 4865 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:17Z","lastTransitionTime":"2026-01-26T16:55:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.270283 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44c06740a3b924e4b54afda160e7e790e2bec67ca8852b703887591802f029af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:17Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.285465 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:17Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.302852 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:17Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.323387 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c0135f6-4074-4aab-9413-a8eb948cd566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready 
status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c\\\",\\\"image\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6617fec99a4214fd209e7909fe
f99f9b57cfe3163afb20fc16be308d253066f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-44x2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:17Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.340387 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e58c71a-f1f1-421b-8e21-53ef9b51b937\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c1c8845e13a8d4b226f13897c388c5e07b2e9442c182930b42a88b2ed7b299b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a953b0ed4474ae87ed2f4658deb39951e319c1675872e49ea28b634d3c37b8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1160d459f3a91359793f3431bd62257ad71f9446c935f43b5265443946d3e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:17Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.353887 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p7qpc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"000aa434-a8c8-4051-88f4-c50d48ce851b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4d967fccbef000c343246b0f7d1cbdd98592d6dc499d444472c20588a6e6697\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m7gvt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p7qpc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:17Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.370603 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bccdd94a-94b4-4a16-95dd-c375a34f754f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fsw2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:17Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.371093 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.371137 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.371150 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.371166 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.371513 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:17Z","lastTransitionTime":"2026-01-26T16:55:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.383082 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-45gx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f950c5c3-b446-44cf-9de5-dd7ff03b615f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10c92950abd6cddc41b262bd7e06b74fa47a01c012edd0bec0f5260b992ee70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwt2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-45gx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:17Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.396049 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:17Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.403645 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-10 01:40:25.864946322 +0000 UTC Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.410305 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:17Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.424908 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b2cd44e037e42e82e40d3b3a2bc4e00c18804301aa567cf5bf5c7c25ef536be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:17Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.438375 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ddedab5-2528-4881-9251-9ba5334aea61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9a5f5b650bd1b1e2875d8767634d7ac74fd1c90ea76f84e65f5e5078828be62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79930d9ec5ae2071a02e2417508458bd2f1b5693205ae0ad3ee46c5a3a31493b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q8cb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:17Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.453155 4865 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-bz29j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82ddf2354d7df028aad27125fddbfccd87910a5407d7d63ff39865dd9cedcaa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-79bqr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-bz29j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:17Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.469201 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44c06740a3b924e4b54afda160e7e790e2bec67ca8852b703887591802f029af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:17Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.473803 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.473836 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.473845 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.473859 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.473890 4865 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:17Z","lastTransitionTime":"2026-01-26T16:55:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.482173 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:17Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.492943 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:17Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.510884 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c0135f6-4074-4aab-9413-a8eb948cd566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6617fec99a4214fd209e7909fef99f9b57cfe3163afb20fc16be308d253066f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-44x2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:17Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.527616 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5610d76387528c6d73e03566f9acf4b744b79f4a39119d35d23b8b2dcb385f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:17Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.540056 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e58c71a-f1f1-421b-8e21-53ef9b51b937\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c1c8845e13a8d4b226f13897c388c5e07b2e9442c182930b42a88b2ed7b299b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a953b0ed4474ae87ed2f4658deb39951e319c1675872e49ea28b634d3c37b8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1160d459f3a91359793f3431bd62257ad71f9446c935f43b5265443946d3e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:17Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.552325 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p7qpc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"000aa434-a8c8-4051-88f4-c50d48ce851b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4d967fccbef000c343246b0f7d1cbdd98592d6dc499d444472c20588a6e6697\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m7gvt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p7qpc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:17Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.570529 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bccdd94a-94b4-4a16-95dd-c375a34f754f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fsw2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:17Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.576565 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.576606 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.576623 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.576644 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.576654 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:17Z","lastTransitionTime":"2026-01-26T16:55:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.678911 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.678976 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.679036 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.679065 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.679082 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:17Z","lastTransitionTime":"2026-01-26T16:55:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.782577 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.782627 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.782637 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.782655 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.782666 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:17Z","lastTransitionTime":"2026-01-26T16:55:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.885699 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.885752 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.885764 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.885784 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.885798 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:17Z","lastTransitionTime":"2026-01-26T16:55:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.905319 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z64kc"] Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.905934 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z64kc" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.908274 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.908590 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.922965 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e58c71a-f1f1-421b-8e21-53ef9b51b937\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c1c8845e13a8d4b226f13897c388c5e07b2e9442c182930b42a88b2ed7b299b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},
{\\\"containerID\\\":\\\"cri-o://1a953b0ed4474ae87ed2f4658deb39951e319c1675872e49ea28b634d3c37b8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1160d459f3a91359793f3431bd62257ad71f9446c935f43b5265443946d3e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:17Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.936579 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p7qpc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"000aa434-a8c8-4051-88f4-c50d48ce851b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4d967fccbef000c343246b0f7d1cbdd98592d6dc499d444472c20588a6e6697\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m7gvt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p7qpc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:17Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.957107 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bccdd94a-94b4-4a16-95dd-c375a34f754f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\
\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fsw2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:17Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.976092 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:17Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.989180 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.989249 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.989263 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.989285 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.989299 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:17Z","lastTransitionTime":"2026-01-26T16:55:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:17 crc kubenswrapper[4865]: I0126 16:55:17.993273 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-45gx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f950c5c3-b446-44cf-9de5-dd7ff03b615f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10c92950abd6cddc41b262bd7e06b74fa47a01c012edd0bec0f5260b992ee70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwt2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-45gx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:17Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.005830 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z64kc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ddaae5-b604-4352-b756-3a4ee374b6e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z64kc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:18Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.019700 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed 
to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:18Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.033835 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b2cd44e037e42e82e40d3b3a2bc4e00c18804301aa567cf5bf5c7c25ef536be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:18Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.046939 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ddedab5-2528-4881-9251-9ba5334aea61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9a5f5b650bd1b1e2875d8767634d7ac74fd1c90ea76f84e65f5e5078828be62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79930d9ec5ae2071a02e2417508458bd2f1b5693205ae0ad3ee46c5a3a31493b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q8cb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:18Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.059936 4865 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-bz29j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82ddf2354d7df028aad27125fddbfccd87910a5407d7d63ff39865dd9cedcaa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-79bqr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-bz29j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:18Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.060452 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f9g7h\" (UniqueName: \"kubernetes.io/projected/27ddaae5-b604-4352-b756-3a4ee374b6e3-kube-api-access-f9g7h\") pod \"ovnkube-control-plane-749d76644c-z64kc\" (UID: \"27ddaae5-b604-4352-b756-3a4ee374b6e3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z64kc" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.060553 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/27ddaae5-b604-4352-b756-3a4ee374b6e3-env-overrides\") pod \"ovnkube-control-plane-749d76644c-z64kc\" (UID: \"27ddaae5-b604-4352-b756-3a4ee374b6e3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z64kc" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.060581 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/27ddaae5-b604-4352-b756-3a4ee374b6e3-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-z64kc\" (UID: \"27ddaae5-b604-4352-b756-3a4ee374b6e3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z64kc" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.060715 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/27ddaae5-b604-4352-b756-3a4ee374b6e3-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-z64kc\" (UID: \"27ddaae5-b604-4352-b756-3a4ee374b6e3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z64kc" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.075644 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5610d76387528c6d73e03566f9acf4b744b79f4a39119d35d23b8b2dcb385f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:18Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.091487 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.091543 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.091554 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.091578 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.091592 4865 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:18Z","lastTransitionTime":"2026-01-26T16:55:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.092879 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44c06740a3b924e4b54afda160e7e790e2bec67ca8852b703887591802f029af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:18Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.105575 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:18Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.119283 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:18Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.133095 4865 generic.go:334] "Generic (PLEG): container finished" podID="bccdd94a-94b4-4a16-95dd-c375a34f754f" containerID="012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38" exitCode=0 Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.133898 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" event={"ID":"bccdd94a-94b4-4a16-95dd-c375a34f754f","Type":"ContainerDied","Data":"012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38"} Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.138553 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c0135f6-4074-4aab-9413-a8eb948cd566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6617fec99a4214fd209e7909fef99f9b57cfe3163afb20fc16be308d253066f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-44x2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:18Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.161087 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.161192 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: 
\"kubernetes.io/configmap/27ddaae5-b604-4352-b756-3a4ee374b6e3-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-z64kc\" (UID: \"27ddaae5-b604-4352-b756-3a4ee374b6e3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z64kc" Jan 26 16:55:18 crc kubenswrapper[4865]: E0126 16:55:18.161232 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:55:34.161213677 +0000 UTC m=+61.745099264 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.161283 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f9g7h\" (UniqueName: \"kubernetes.io/projected/27ddaae5-b604-4352-b756-3a4ee374b6e3-kube-api-access-f9g7h\") pod \"ovnkube-control-plane-749d76644c-z64kc\" (UID: \"27ddaae5-b604-4352-b756-3a4ee374b6e3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z64kc" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.161306 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/27ddaae5-b604-4352-b756-3a4ee374b6e3-env-overrides\") pod \"ovnkube-control-plane-749d76644c-z64kc\" (UID: \"27ddaae5-b604-4352-b756-3a4ee374b6e3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z64kc" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.161328 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/27ddaae5-b604-4352-b756-3a4ee374b6e3-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-z64kc\" (UID: \"27ddaae5-b604-4352-b756-3a4ee374b6e3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z64kc" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.161756 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/27ddaae5-b604-4352-b756-3a4ee374b6e3-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-z64kc\" (UID: \"27ddaae5-b604-4352-b756-3a4ee374b6e3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z64kc" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.161792 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/27ddaae5-b604-4352-b756-3a4ee374b6e3-env-overrides\") pod \"ovnkube-control-plane-749d76644c-z64kc\" (UID: \"27ddaae5-b604-4352-b756-3a4ee374b6e3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z64kc" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.165291 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e58c71a-f1f1-421b-8e21-53ef9b51b937\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c1c8845e13a8d4b226f13897c388c5e07b2e9442c182930b42a88b2ed7b299b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a953b0ed4474ae87ed2f4658deb39951e319c1675872e49ea28b634d3c37b8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1160d459f3a91359793f3431bd62257ad71f9446c935f43b5265443946d3e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:18Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.168796 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/27ddaae5-b604-4352-b756-3a4ee374b6e3-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-z64kc\" (UID: \"27ddaae5-b604-4352-b756-3a4ee374b6e3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z64kc" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.178964 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f9g7h\" (UniqueName: \"kubernetes.io/projected/27ddaae5-b604-4352-b756-3a4ee374b6e3-kube-api-access-f9g7h\") pod \"ovnkube-control-plane-749d76644c-z64kc\" (UID: \"27ddaae5-b604-4352-b756-3a4ee374b6e3\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z64kc" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.179894 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p7qpc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"000aa434-a8c8-4051-88f4-c50d48ce851b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4d967fccbef000c343246b0f7d1cbdd98592d6dc499d444472c20588a6e6697\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m7gvt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p7qpc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:18Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.193645 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.193700 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.193716 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.193737 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.193752 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:18Z","lastTransitionTime":"2026-01-26T16:55:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.197140 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bccdd94a-94b4-4a16-95dd-c375a34f754f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]},{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:10Z\\\"}},
\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fsw2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:18Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.210615 
4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:18Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.218316 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z64kc" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.222414 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-45gx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f950c5c3-b446-44cf-9de5-dd7ff03b615f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10c92950abd6cddc41b262bd7e06b74fa47a01c012edd0bec0f5260b992ee70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwt2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-45gx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:18Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.237023 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z64kc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ddaae5-b604-4352-b756-3a4ee374b6e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z64kc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:18Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.253795 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bz29j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82ddf2354d7df028aad27125fddbfccd87910a5407d7d63ff39865dd9cedcaa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-79bqr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bz29j\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:18Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.262583 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:55:18 crc kubenswrapper[4865]: E0126 16:55:18.262819 4865 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 26 16:55:18 crc kubenswrapper[4865]: E0126 16:55:18.262870 4865 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 26 16:55:18 crc kubenswrapper[4865]: E0126 16:55:18.262889 4865 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 16:55:18 crc kubenswrapper[4865]: E0126 16:55:18.262965 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-26 16:55:34.262942955 +0000 UTC m=+61.846828712 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.263068 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.263108 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.263137 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:55:18 crc kubenswrapper[4865]: E0126 16:55:18.263783 4865 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 26 16:55:18 crc kubenswrapper[4865]: E0126 16:55:18.263850 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 16:55:34.26383153 +0000 UTC m=+61.847717117 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 26 16:55:18 crc kubenswrapper[4865]: E0126 16:55:18.263920 4865 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 26 16:55:18 crc kubenswrapper[4865]: E0126 16:55:18.263933 4865 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 26 16:55:18 crc kubenswrapper[4865]: E0126 16:55:18.263947 4865 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 16:55:18 crc kubenswrapper[4865]: E0126 16:55:18.263979 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-26 16:55:34.263972614 +0000 UTC m=+61.847858201 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 16:55:18 crc kubenswrapper[4865]: E0126 16:55:18.264340 4865 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 26 16:55:18 crc kubenswrapper[4865]: E0126 16:55:18.264382 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 16:55:34.264373455 +0000 UTC m=+61.848259042 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.267161 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:18Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.280207 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b2cd44e037e42e82e40d3b3a2bc4e00c18804301aa567cf5bf5c7c25ef536be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:18Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.296770 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.296896 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.296909 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.296870 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ddedab5-2528-4881-9251-9ba5334aea61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9a5f5b650bd1b1e2875d8767634d7ac74fd1c90ea76f84e65f5e5078828be62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79930d9ec5ae2071a02e2417508458bd2f1b5693205ae0ad3ee46c5a3a31493b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-q8cb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:18Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.296970 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.296985 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:18Z","lastTransitionTime":"2026-01-26T16:55:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.309134 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:18Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.328579 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c0135f6-4074-4aab-9413-a8eb948cd566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6617fec99a4214fd209e7909fef99f9b57cfe316
3afb20fc16be308d253066f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-44x2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:18Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.340485 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5610d76387528c6d73e03566f9acf4b744b79f4a39119d35d23b8b2dcb385f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:18Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.352914 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44c06740a3b924e4b54afda160e7e790e2bec67ca8852b703887591802f029af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:18Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.358078 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:55:18 crc kubenswrapper[4865]: E0126 16:55:18.358210 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.358479 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:55:18 crc kubenswrapper[4865]: E0126 16:55:18.358526 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.358650 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:55:18 crc kubenswrapper[4865]: E0126 16:55:18.358696 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.363872 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes
.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:18Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.399613 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.399646 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.399654 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.399671 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.399681 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:18Z","lastTransitionTime":"2026-01-26T16:55:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.404102 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-18 06:11:12.057679705 +0000 UTC Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.502091 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.502160 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.502173 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.502191 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.502203 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:18Z","lastTransitionTime":"2026-01-26T16:55:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.605588 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.605673 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.605694 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.605720 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.605734 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:18Z","lastTransitionTime":"2026-01-26T16:55:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.667465 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-wx7wp"] Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.668110 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:55:18 crc kubenswrapper[4865]: E0126 16:55:18.668185 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.680195 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e58c71a-f1f1-421b-8e21-53ef9b51b937\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c1c8845e13a8d4b226f13897c388c5e07b2e9442c182930b42a88b2ed7b299b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a953b0ed4474ae87ed2f4658deb39951e319c1675872e49ea28b634d3c37b8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-re
sources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1160d459f3a91359793f3431bd62257ad71f9446c935f43b5265443946d3e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:18Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.698113 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p7qpc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"000aa434-a8c8-4051-88f4-c50d48ce851b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4d967fccbef000c343246b0f7d1cbdd98592d6dc499d444472c20588a6e6697\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m7gvt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p7qpc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:18Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.709113 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.709298 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.709317 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.709345 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.709358 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:18Z","lastTransitionTime":"2026-01-26T16:55:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.715700 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bccdd94a-94b4-4a16-95dd-c375a34f754f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]},{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:10Z\\\"}},
\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fsw2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:18Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.726817 
4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:18Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.737899 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-45gx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f950c5c3-b446-44cf-9de5-dd7ff03b615f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10c92950abd6cddc41b262bd7e06b74fa47a01c012edd0bec0f5260b992ee70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwt2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-45gx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:18Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.751788 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z64kc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ddaae5-b604-4352-b756-3a4ee374b6e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z64kc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:18Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.766344 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:18Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.767718 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/27ebe888-610a-47c4-b256-3ddbf03f83b9-metrics-certs\") pod \"network-metrics-daemon-wx7wp\" (UID: \"27ebe888-610a-47c4-b256-3ddbf03f83b9\") " pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.767800 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2dr8s\" (UniqueName: \"kubernetes.io/projected/27ebe888-610a-47c4-b256-3ddbf03f83b9-kube-api-access-2dr8s\") pod \"network-metrics-daemon-wx7wp\" (UID: \"27ebe888-610a-47c4-b256-3ddbf03f83b9\") " pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.779086 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b2cd44e037e42e82e40d3b3a2bc4e00c18804301aa567cf5bf5c7c25ef536be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:18Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.791562 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ddedab5-2528-4881-9251-9ba5334aea61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9a5f5b650bd1b1e2875d8767634d7ac74fd1c90ea76f84e65f5e5078828be62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79930d9ec5ae2071a02e2417508458bd2f1b5693205ae0ad3ee46c5a3a31493b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q8cb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:18Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.805926 4865 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-bz29j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82ddf2354d7df028aad27125fddbfccd87910a5407d7d63ff39865dd9cedcaa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-79bqr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-bz29j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:18Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.812503 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.812546 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.812555 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.812572 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.812583 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:18Z","lastTransitionTime":"2026-01-26T16:55:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.821158 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-wx7wp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ebe888-610a-47c4-b256-3ddbf03f83b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:18Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-wx7wp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:18Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.836413 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5610d76387528c6d73e03566f9acf4b744b79f4a39119d35d23b8b2dcb385f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:18Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.852195 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44c06740a3b924e4b54afda160e7e790e2bec67ca8852b703887591802f029af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:18Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.867457 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:18Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.869413 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/27ebe888-610a-47c4-b256-3ddbf03f83b9-metrics-certs\") pod \"network-metrics-daemon-wx7wp\" (UID: \"27ebe888-610a-47c4-b256-3ddbf03f83b9\") " pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.869495 4865 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2dr8s\" (UniqueName: \"kubernetes.io/projected/27ebe888-610a-47c4-b256-3ddbf03f83b9-kube-api-access-2dr8s\") pod \"network-metrics-daemon-wx7wp\" (UID: \"27ebe888-610a-47c4-b256-3ddbf03f83b9\") " pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:55:18 crc kubenswrapper[4865]: E0126 16:55:18.869741 4865 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 26 16:55:18 crc kubenswrapper[4865]: E0126 16:55:18.869789 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/27ebe888-610a-47c4-b256-3ddbf03f83b9-metrics-certs podName:27ebe888-610a-47c4-b256-3ddbf03f83b9 nodeName:}" failed. No retries permitted until 2026-01-26 16:55:19.369777438 +0000 UTC m=+46.953663025 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/27ebe888-610a-47c4-b256-3ddbf03f83b9-metrics-certs") pod "network-metrics-daemon-wx7wp" (UID: "27ebe888-610a-47c4-b256-3ddbf03f83b9") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.882166 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:18Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.885816 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2dr8s\" (UniqueName: \"kubernetes.io/projected/27ebe888-610a-47c4-b256-3ddbf03f83b9-kube-api-access-2dr8s\") pod \"network-metrics-daemon-wx7wp\" (UID: \"27ebe888-610a-47c4-b256-3ddbf03f83b9\") " pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.904376 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c0135f6-4074-4aab-9413-a8eb948cd566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6617fec99a4214fd209e7909fef99f9b57cfe316
3afb20fc16be308d253066f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-44x2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:18Z is after 2025-08-24T17:21:41Z"
Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.915619 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.915656 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.915665 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.915680 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.915690 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:18Z","lastTransitionTime":"2026-01-26T16:55:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
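The NodeNotReady entries above all trace back to one symptom: the kubelet finds no CNI configuration in /etc/kubernetes/cni/net.d/ because ovn-kubernetes has not written one yet. A minimal sketch of that readiness test, assuming a glob over common CNI config extensions (.conf, .conflist, .json); this is an illustration, not kubelet source:

// cnicheck is an illustrative sketch, not kubelet code: it performs the same
// kind of test that produces the "no CNI configuration file" message above.
// The extensions scanned are an assumption about what counts as a CNI config.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	confDir := "/etc/kubernetes/cni/net.d" // directory named in the log message
	var confs []string
	for _, pattern := range []string{"*.conf", "*.conflist", "*.json"} {
		matches, err := filepath.Glob(filepath.Join(confDir, pattern))
		if err == nil {
			confs = append(confs, matches...)
		}
	}
	if len(confs) == 0 {
		fmt.Fprintf(os.Stderr, "no CNI configuration file in %s. Has your network provider started?\n", confDir)
		os.Exit(1)
	}
	fmt.Println("CNI config present:", confs)
}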
Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.992718 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.992777 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.992788 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.992807 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:18 crc kubenswrapper[4865]: I0126 16:55:18.992819 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:18Z","lastTransitionTime":"2026-01-26T16:55:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:19 crc kubenswrapper[4865]: E0126 16:55:19.006660 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ac1a731-046d-4d8f-8161-6e9e491f5dac\\\",\\\"systemUUID\\\":\\\"35d9e2a2-68c2-48cb-856f-00ba3eb74617\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:19Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.011318 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.011355 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure"
Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.011367 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.011388 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.011402 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:19Z","lastTransitionTime":"2026-01-26T16:55:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:19 crc kubenswrapper[4865]: E0126 16:55:19.025962 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{...node status payload identical to the 16:55:19.006660 attempt above, except the condition timestamps read 2026-01-26T16:55:19Z...}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:19Z is after 2025-08-24T17:21:41Z"
Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.030820 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.030855 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
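Every failed status patch above ends the same way: the kubelet's POST to the network-node-identity webhook on https://127.0.0.1:9743 fails x509 validation because the webhook's serving certificate expired at 2025-08-24T17:21:41Z, long before the node clock's 2026-01-26. A minimal sketch of the same validity check, assuming only the endpoint quoted in the log (illustrative Go, not kubelet or webhook code):

// certcheck is an illustrative diagnostic, not kubelet code: it dials the
// webhook endpoint quoted in the log and reports the serving certificate's
// validity window, reproducing the "certificate has expired" comparison.
package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	// InsecureSkipVerify lets us inspect the expired certificate instead of
	// failing the handshake the way the kubelet's client does.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		fmt.Println("dial:", err)
		return
	}
	defer conn.Close()

	cert := conn.ConnectionState().PeerCertificates[0]
	now := time.Now().UTC()
	fmt.Printf("notBefore=%s notAfter=%s now=%s expired=%v\n",
		cert.NotBefore.UTC().Format(time.RFC3339),
		cert.NotAfter.UTC().Format(time.RFC3339),
		now.Format(time.RFC3339),
		now.After(cert.NotAfter))
}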
Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.030865 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.030883 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.030896 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:19Z","lastTransitionTime":"2026-01-26T16:55:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:19 crc kubenswrapper[4865]: E0126 16:55:19.055482 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{...node status payload identical to the 16:55:19.025962 attempt above...}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:19Z is after 2025-08-24T17:21:41Z"
Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.060524 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.060549 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.060558 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.060585 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.060596 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:19Z","lastTransitionTime":"2026-01-26T16:55:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:19 crc kubenswrapper[4865]: E0126 16:55:19.072398 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{...node status payload identical to the 16:55:19.025962 attempt above...}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:19Z is after 2025-08-24T17:21:41Z"
Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.076137 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.076162 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.076170 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.076184 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.076193 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:19Z","lastTransitionTime":"2026-01-26T16:55:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:19 crc kubenswrapper[4865]: E0126 16:55:19.091295 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ac1a731-046d-4d8f-8161-6e9e491f5dac\\\",\\\"systemUUID\\\":\\\"35d9e2a2-68c2-48cb-856f-00ba3eb74617\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:19Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:19 crc kubenswrapper[4865]: E0126 16:55:19.091479 4865 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.093695 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.093742 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.093757 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.093780 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.093793 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:19Z","lastTransitionTime":"2026-01-26T16:55:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.138616 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z64kc" event={"ID":"27ddaae5-b604-4352-b756-3a4ee374b6e3","Type":"ContainerStarted","Data":"792276a8d5b5b8871ac05974be04d161e55045286b344104315a2d13a633e6bb"} Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.203622 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.203682 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.203696 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.203716 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.203730 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:19Z","lastTransitionTime":"2026-01-26T16:55:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.306910 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.306976 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.307014 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.307042 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.307057 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:19Z","lastTransitionTime":"2026-01-26T16:55:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.383362 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/27ebe888-610a-47c4-b256-3ddbf03f83b9-metrics-certs\") pod \"network-metrics-daemon-wx7wp\" (UID: \"27ebe888-610a-47c4-b256-3ddbf03f83b9\") " pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:55:19 crc kubenswrapper[4865]: E0126 16:55:19.383698 4865 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 26 16:55:19 crc kubenswrapper[4865]: E0126 16:55:19.383865 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/27ebe888-610a-47c4-b256-3ddbf03f83b9-metrics-certs podName:27ebe888-610a-47c4-b256-3ddbf03f83b9 nodeName:}" failed. No retries permitted until 2026-01-26 16:55:20.383831618 +0000 UTC m=+47.967717375 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/27ebe888-610a-47c4-b256-3ddbf03f83b9-metrics-certs") pod "network-metrics-daemon-wx7wp" (UID: "27ebe888-610a-47c4-b256-3ddbf03f83b9") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.405459 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-03 08:10:21.191884602 +0000 UTC Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.409838 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.409894 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.409909 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.409930 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.409943 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:19Z","lastTransitionTime":"2026-01-26T16:55:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.513570 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.513612 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.513625 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.513642 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.513652 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:19Z","lastTransitionTime":"2026-01-26T16:55:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.616754 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.616797 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.616807 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.616825 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.616836 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:19Z","lastTransitionTime":"2026-01-26T16:55:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.730553 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.730607 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.730618 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.730637 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.730648 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:19Z","lastTransitionTime":"2026-01-26T16:55:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.834310 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.834356 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.834365 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.834382 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.834394 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:19Z","lastTransitionTime":"2026-01-26T16:55:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.939310 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.939372 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.939391 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.939417 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:19 crc kubenswrapper[4865]: I0126 16:55:19.939436 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:19Z","lastTransitionTime":"2026-01-26T16:55:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.043009 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.043066 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.043080 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.043099 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.043111 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:20Z","lastTransitionTime":"2026-01-26T16:55:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.143318 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z64kc" event={"ID":"27ddaae5-b604-4352-b756-3a4ee374b6e3","Type":"ContainerStarted","Data":"2b554efe88a77e7cf03fc70023bc8663bc926af4a8a95d657d5073fcc44c9f17"} Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.147786 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" event={"ID":"bccdd94a-94b4-4a16-95dd-c375a34f754f","Type":"ContainerStarted","Data":"90ccd48f832c010ac33cb2001b2e71aac437d4366744c03a232ff9d8dd4acea4"} Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.164037 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bz29j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82ddf2354d7df028aad27125fddbfccd87910a5407d7d63ff39865dd9cedcaa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"}
,{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-79bqr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bz29j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:20Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.174016 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-wx7wp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ebe888-610a-47c4-b256-3ddbf03f83b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:18Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-wx7wp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:20Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.183363 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:20Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.192733 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b2cd44e037e42e82e40d3b3a2bc4e00c18804301aa567cf5bf5c7c25ef536be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:20Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.201840 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ddedab5-2528-4881-9251-9ba5334aea61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9a5f5b650bd1b1e2875d8767634d7ac74fd1c90ea76f84e65f5e5078828be62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79930d9ec5ae2071a02e2417508458bd2f1b5693205ae0ad3ee46c5a3a31493b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q8cb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:20Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.215833 4865 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:20Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.233960 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c0135f6-4074-4aab-9413-a8eb948cd566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6617fec99a4214fd209e7909fef99f9b57cfe3163afb20fc16be308d253066f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPat
h\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-44x2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:20Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.246887 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5610d76387528c6d73e03566f9acf4b744b79f4a39119d35d23b8b2dcb385f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:20Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.260265 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.260313 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.260327 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.260347 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.260363 4865 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:20Z","lastTransitionTime":"2026-01-26T16:55:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.279266 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44c06740a3b924e4b54afda160e7e790e2bec67ca8852b703887591802f029af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:20Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.352034 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:20Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.357069 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.357236 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.357381 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:55:20 crc kubenswrapper[4865]: E0126 16:55:20.357383 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.357458 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:55:20 crc kubenswrapper[4865]: E0126 16:55:20.357510 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:55:20 crc kubenswrapper[4865]: E0126 16:55:20.357678 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:55:20 crc kubenswrapper[4865]: E0126 16:55:20.357744 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.362886 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.362911 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.362918 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.362930 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.362941 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:20Z","lastTransitionTime":"2026-01-26T16:55:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.390577 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e58c71a-f1f1-421b-8e21-53ef9b51b937\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c1c8845e13a8d4b226f13897c388c5e07b2e9442c182930b42a88b2ed7b299b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee
1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a953b0ed4474ae87ed2f4658deb39951e319c1675872e49ea28b634d3c37b8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1160d459f3a91359793f3431bd62257ad71f9446c935f43b5265443946d3e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:20Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.401849 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p7qpc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"000aa434-a8c8-4051-88f4-c50d48ce851b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4d967fccbef000c343246b0f7d1cbdd98592d6dc499d444472c20588a6e6697\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m7gvt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p7qpc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:20Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.406079 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-10 15:45:38.663571223 +0000 UTC Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.417299 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bccdd94a-94b4-4a16-95dd-c375a34f754f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ccd48f832c010ac33cb2001b2e71aac437d4366744c03a232ff9d8dd4acea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fsw2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:20Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.436415 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:20Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.453493 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-45gx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f950c5c3-b446-44cf-9de5-dd7ff03b615f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10c92950abd6cddc41b262bd7e06b74fa47a01c012edd0bec0f5260b992ee70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwt2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-45gx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:20Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.473353 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z64kc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ddaae5-b604-4352-b756-3a4ee374b6e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z64kc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:20Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.475177 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.475195 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.475204 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.475218 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.475227 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:20Z","lastTransitionTime":"2026-01-26T16:55:20Z","reason":"KubeletNotReady","message":"container runtime 
network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.599468 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.599522 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.599535 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.599556 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.599572 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:20Z","lastTransitionTime":"2026-01-26T16:55:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.600189 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/27ebe888-610a-47c4-b256-3ddbf03f83b9-metrics-certs\") pod \"network-metrics-daemon-wx7wp\" (UID: \"27ebe888-610a-47c4-b256-3ddbf03f83b9\") " pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:55:20 crc kubenswrapper[4865]: E0126 16:55:20.600408 4865 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 26 16:55:20 crc kubenswrapper[4865]: E0126 16:55:20.600485 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/27ebe888-610a-47c4-b256-3ddbf03f83b9-metrics-certs podName:27ebe888-610a-47c4-b256-3ddbf03f83b9 nodeName:}" failed. No retries permitted until 2026-01-26 16:55:22.600464124 +0000 UTC m=+50.184349721 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/27ebe888-610a-47c4-b256-3ddbf03f83b9-metrics-certs") pod "network-metrics-daemon-wx7wp" (UID: "27ebe888-610a-47c4-b256-3ddbf03f83b9") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.740088 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.740491 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.740606 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.740716 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.740815 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:20Z","lastTransitionTime":"2026-01-26T16:55:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.843824 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.843880 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.843894 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.843918 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.843936 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:20Z","lastTransitionTime":"2026-01-26T16:55:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.947510 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.947549 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.947559 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.947576 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:20 crc kubenswrapper[4865]: I0126 16:55:20.947586 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:20Z","lastTransitionTime":"2026-01-26T16:55:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.050505 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.050548 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.050557 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.050573 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.050583 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:21Z","lastTransitionTime":"2026-01-26T16:55:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.249450 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.249540 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.249551 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.249622 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.249642 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:21Z","lastTransitionTime":"2026-01-26T16:55:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.255551 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z64kc" event={"ID":"27ddaae5-b604-4352-b756-3a4ee374b6e3","Type":"ContainerStarted","Data":"4131a32bf9f6e8aa00c8250af3d01f8768e1db9c93b7a25b925580f0f337c2c2"}
Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.273764 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:21Z is after 2025-08-24T17:21:41Z"
Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.287257 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-45gx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f950c5c3-b446-44cf-9de5-dd7ff03b615f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10c92950abd6cddc41b262bd7e06b74fa47a01c012edd0bec0f5260b992ee70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwt2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-45gx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:21Z is after 2025-08-24T17:21:41Z"
Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.302797 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z64kc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ddaae5-b604-4352-b756-3a4ee374b6e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b554efe88a77e7cf03fc70023bc8663bc926af4a8a95d657d5073fcc44c9f17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4131a32bf9f6e8aa00c8250af3d01f8768e1db9c93b7a25b925580f0f337c2c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z64kc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:21Z is after 2025-08-24T17:21:41Z"
Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.317273 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:21Z is after 2025-08-24T17:21:41Z"
Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.331126 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b2cd44e037e42e82e40d3b3a2bc4e00c18804301aa567cf5bf5c7c25ef536be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:21Z is after 2025-08-24T17:21:41Z"
Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.344259 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ddedab5-2528-4881-9251-9ba5334aea61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9a5f5b650bd1b1e2875d8767634d7ac74fd1c90ea76f84e65f5e5078828be62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79930d9ec5ae2071a02e2417508458bd2f1b5693205ae0ad3ee46c5a3a31493b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q8cb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:21Z is after 2025-08-24T17:21:41Z"
Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.351833 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.351892 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.351979 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.352029 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.352045 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:21Z","lastTransitionTime":"2026-01-26T16:55:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.361841 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bz29j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82ddf2354d7df028aad27125fddbfccd87910a5407d7d63ff39865dd9cedcaa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-79bqr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bz29j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:21Z is after 2025-08-24T17:21:41Z"
Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.377436 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-wx7wp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ebe888-610a-47c4-b256-3ddbf03f83b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:18Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-wx7wp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:21Z is after 2025-08-24T17:21:41Z"
Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.393807 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5610d76387528c6d73e03566f9acf4b744b79f4a39119d35d23b8b2dcb385f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:21Z is after 2025-08-24T17:21:41Z"
Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.406560 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-18 17:37:23.44457722 +0000 UTC
Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.408294 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44c06740a3b924e4b54afda160e7e790e2bec67ca8852b703887591802f029af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:21Z is after 2025-08-24T17:21:41Z"
Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.422120 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:21Z is after 2025-08-24T17:21:41Z"
Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.437796 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:21Z is after 2025-08-24T17:21:41Z"
Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.454268 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.454307 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.454319 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.454334 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.454379 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:21Z","lastTransitionTime":"2026-01-26T16:55:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.460332 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c0135f6-4074-4aab-9413-a8eb948cd566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6617fec99a4214fd209e7909fef99f9b57cfe3163afb20fc16be308d253066f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-44x2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:21Z is after 2025-08-24T17:21:41Z"
Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.483150 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e58c71a-f1f1-421b-8e21-53ef9b51b937\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c1c8845e13a8d4b226f13897c388c5e07b2e9442c182930b42a88b2ed7b299b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a953b0ed4474ae87ed2f4658deb39951e319c1675872e49ea28b634d3c37b8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1160d459f3a91359793f3431bd62257ad71f9446c935f43b5265443946d3e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:21Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.503036 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p7qpc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"000aa434-a8c8-4051-88f4-c50d48ce851b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4d967fccbef000c343246b0f7d1cbdd98592d6dc499d444472c20588a6e6697\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m7gvt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p7qpc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:21Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.520889 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bccdd94a-94b4-4a16-95dd-c375a34f754f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ccd48f832c010ac33cb2001b2e71aac437d4366744c03a232ff9d8dd4acea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mou
ntPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCou
nt\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fsw2\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:21Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.557481 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.557579 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.557593 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.557610 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.557622 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:21Z","lastTransitionTime":"2026-01-26T16:55:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.715484 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.715545 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.715557 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.715574 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.715588 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:21Z","lastTransitionTime":"2026-01-26T16:55:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.819338 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.819884 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.819907 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.819926 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.819938 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:21Z","lastTransitionTime":"2026-01-26T16:55:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.922591 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.922630 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.922638 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.922654 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:21 crc kubenswrapper[4865]: I0126 16:55:21.922759 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:21Z","lastTransitionTime":"2026-01-26T16:55:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.026142 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.026204 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.026213 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.026234 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.026246 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:22Z","lastTransitionTime":"2026-01-26T16:55:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.129531 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.129591 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.129606 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.129628 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.129641 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:22Z","lastTransitionTime":"2026-01-26T16:55:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.233551 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.233616 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.233626 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.233651 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.233665 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:22Z","lastTransitionTime":"2026-01-26T16:55:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.336880 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.337009 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.337072 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.337355 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.343599 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:22Z","lastTransitionTime":"2026-01-26T16:55:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.357177 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.357258 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:55:22 crc kubenswrapper[4865]: E0126 16:55:22.357330 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:55:22 crc kubenswrapper[4865]: E0126 16:55:22.357464 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.357258 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:55:22 crc kubenswrapper[4865]: E0126 16:55:22.357623 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9" Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.357795 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:55:22 crc kubenswrapper[4865]: E0126 16:55:22.358387 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
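[annotation] The repeating KubeletNotReady records above all trace back to one condition: the container runtime reports NetworkReady=false because no CNI configuration file exists yet in /etc/kubernetes/cni/net.d/ (the ovnkube-node pod that would write it is still restarting). Below is a minimal Go sketch of that kind of readiness probe; hasCNIConfig and the extension list are illustrative assumptions, not the actual CRI-O implementation.

    package main

    import (
    	"fmt"
    	"os"
    	"path/filepath"
    )

    // hasCNIConfig reports whether dir contains at least one CNI network
    // configuration file. Hypothetical helper mirroring the check whose
    // failure produces "no CNI configuration file in ..." above.
    func hasCNIConfig(dir string) (bool, error) {
    	entries, err := os.ReadDir(dir)
    	if err != nil {
    		return false, err
    	}
    	for _, e := range entries {
    		switch filepath.Ext(e.Name()) {
    		case ".conf", ".conflist", ".json":
    			return true, nil
    		}
    	}
    	return false, nil
    }

    func main() {
    	ok, err := hasCNIConfig("/etc/kubernetes/cni/net.d")
    	if err != nil || !ok {
    		// Matches the state reported in the records above: the node
    		// stays Ready=False and new pod sandboxes are not started.
    		fmt.Println("network plugin not ready: no CNI configuration file")
    		return
    	}
    	fmt.Println("NetworkReady=true")
    }

Once the network provider drops a config file into the directory, the same probe flips to ready and the "Error syncing pod, skipping" records stop.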
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.406811 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-13 16:32:39.039658449 +0000 UTC Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.447345 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.447422 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.447435 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.447457 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.447479 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:22Z","lastTransitionTime":"2026-01-26T16:55:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.550645 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.550700 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.550709 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.550730 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.550743 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:22Z","lastTransitionTime":"2026-01-26T16:55:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.622819 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/27ebe888-610a-47c4-b256-3ddbf03f83b9-metrics-certs\") pod \"network-metrics-daemon-wx7wp\" (UID: \"27ebe888-610a-47c4-b256-3ddbf03f83b9\") " pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:55:22 crc kubenswrapper[4865]: E0126 16:55:22.623142 4865 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 26 16:55:22 crc kubenswrapper[4865]: E0126 16:55:22.623274 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/27ebe888-610a-47c4-b256-3ddbf03f83b9-metrics-certs podName:27ebe888-610a-47c4-b256-3ddbf03f83b9 nodeName:}" failed. No retries permitted until 2026-01-26 16:55:26.623246016 +0000 UTC m=+54.207131603 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/27ebe888-610a-47c4-b256-3ddbf03f83b9-metrics-certs") pod "network-metrics-daemon-wx7wp" (UID: "27ebe888-610a-47c4-b256-3ddbf03f83b9") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.654198 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.654262 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.654274 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.654297 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.654310 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:22Z","lastTransitionTime":"2026-01-26T16:55:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.757617 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.757683 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.757696 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.757718 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.757738 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:22Z","lastTransitionTime":"2026-01-26T16:55:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.860834 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.860910 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.860921 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.860941 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.860953 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:22Z","lastTransitionTime":"2026-01-26T16:55:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.964359 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.964407 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.964417 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.964437 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:22 crc kubenswrapper[4865]: I0126 16:55:22.964448 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:22Z","lastTransitionTime":"2026-01-26T16:55:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.067453 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.067508 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.067527 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.067553 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.067569 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:23Z","lastTransitionTime":"2026-01-26T16:55:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.170659 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.171252 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.171476 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.171640 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.171794 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:23Z","lastTransitionTime":"2026-01-26T16:55:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.290806 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-44x2q_0c0135f6-4074-4aab-9413-a8eb948cd566/ovnkube-controller/0.log" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.293349 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.293528 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.293900 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.294061 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.294351 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:23Z","lastTransitionTime":"2026-01-26T16:55:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.295129 4865 generic.go:334] "Generic (PLEG): container finished" podID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerID="6617fec99a4214fd209e7909fef99f9b57cfe3163afb20fc16be308d253066f9" exitCode=1 Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.295197 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" event={"ID":"0c0135f6-4074-4aab-9413-a8eb948cd566","Type":"ContainerDied","Data":"6617fec99a4214fd209e7909fef99f9b57cfe3163afb20fc16be308d253066f9"} Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.296118 4865 scope.go:117] "RemoveContainer" containerID="6617fec99a4214fd209e7909fef99f9b57cfe3163afb20fc16be308d253066f9" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.316027 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e58c71a-f1f1-421b-8e21-53ef9b51b937\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c1c8845e13a8d4b226f13897c388c5e07b2e9442c182930b42a88b2ed7b299b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a953b0ed4474ae87ed2f4658deb39951e319c1675872e49ea28b634d3c37b8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1160d459f3a91359793f3431bd62257ad71f9446c935f43b5265443946d3e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:23Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.329186 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p7qpc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"000aa434-a8c8-4051-88f4-c50d48ce851b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4d967fccbef000c343246b0f7d1cbdd98592d6dc499d444472c20588a6e6697\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m7gvt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p7qpc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:23Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.345605 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bccdd94a-94b4-4a16-95dd-c375a34f754f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ccd48f832c010ac33cb2001b2e71aac437d4366744c03a232ff9d8dd4acea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mou
ntPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCou
nt\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fsw2\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:23Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.363013 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:23Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.376107 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-45gx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f950c5c3-b446-44cf-9de5-dd7ff03b615f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10c92950abd6cddc41b262bd7e06b74fa47a01c012edd0bec0f5260b992ee70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwt2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-45gx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:23Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.391382 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z64kc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ddaae5-b604-4352-b756-3a4ee374b6e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b554efe88a77e7cf03fc70023bc8663bc926af4a8a95d657d5073fcc44c9f17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4131a32bf9f6e8aa00c8250af3d01f8768e1db9c93b7a25b925580f0f337c2c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z64kc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:23Z is after 2025-08-24T17:21:41Z" Jan 26 
16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.397535 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.397609 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.397623 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.397652 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.397667 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:23Z","lastTransitionTime":"2026-01-26T16:55:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.407057 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-16 23:33:16.27244747 +0000 UTC
Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.407412 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:23Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.422089 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b2cd44e037e42e82e40d3b3a2bc4e00c18804301aa567cf5bf5c7c25ef536be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:23Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.438957 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ddedab5-2528-4881-9251-9ba5334aea61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9a5f5b650bd1b1e2875d8767634d7ac74fd1c90ea76f84e65f5e5078828be62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79930d9ec5ae2071a02e2417508458bd2f1b5693205ae0ad3ee46c5a3a31493b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q8cb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:23Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.453491 4865 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-bz29j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82ddf2354d7df028aad27125fddbfccd87910a5407d7d63ff39865dd9cedcaa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-79bqr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-bz29j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:23Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.471705 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-wx7wp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ebe888-610a-47c4-b256-3ddbf03f83b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:18Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-wx7wp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2026-01-26T16:55:23Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.488467 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\
"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5610d76387528c6d73e03566f9acf4b744b79f4a39119d35d23b8b2dcb385f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:23Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.500049 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.500080 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.500093 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.500114 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.500126 4865 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:23Z","lastTransitionTime":"2026-01-26T16:55:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.508498 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44c06740a3b924e4b54afda160e7e790e2bec67ca8852b703887591802f029af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:23Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.525574 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:23Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.543545 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:23Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.569273 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c0135f6-4074-4aab-9413-a8eb948cd566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6617fec99a4214fd209e7909fef99f9b57cfe316
3afb20fc16be308d253066f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6617fec99a4214fd209e7909fef99f9b57cfe3163afb20fc16be308d253066f9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T16:55:22Z\\\",\\\"message\\\":\\\"oval\\\\nI0126 16:55:22.075414 6098 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0126 16:55:22.075439 6098 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0126 16:55:22.075489 6098 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0126 16:55:22.075500 6098 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0126 16:55:22.075540 6098 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0126 16:55:22.075552 6098 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0126 16:55:22.075571 6098 factory.go:656] Stopping watch factory\\\\nI0126 16:55:22.075597 6098 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0126 16:55:22.075612 6098 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0126 16:55:22.075630 6098 handler.go:208] Removed *v1.Node event handler 2\\\\nI0126 16:55:22.075640 6098 handler.go:208] Removed *v1.Node event handler 7\\\\nI0126 16:55:22.075653 6098 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0126 16:55:22.075656 6098 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0126 16:55:22.075712 6098 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0126 16:55:22.075727 6098 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0126 16:55:22.075738 6098 handler.go:208] Removed *v1.EgressFirewall 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-44x2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:23Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.603744 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.603803 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.603813 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.603833 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.603848 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:23Z","lastTransitionTime":"2026-01-26T16:55:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.706815 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.706860 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.706871 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.706889 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.706901 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:23Z","lastTransitionTime":"2026-01-26T16:55:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.810472 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.810529 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.810542 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.810562 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.810575 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:23Z","lastTransitionTime":"2026-01-26T16:55:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.913332 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.913390 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.913400 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.913422 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:23 crc kubenswrapper[4865]: I0126 16:55:23.913433 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:23Z","lastTransitionTime":"2026-01-26T16:55:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.016525 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.016603 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.016618 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.016647 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.016663 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:24Z","lastTransitionTime":"2026-01-26T16:55:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.120573 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.120623 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.120634 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.120657 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.120670 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:24Z","lastTransitionTime":"2026-01-26T16:55:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.224252 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.224309 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.224321 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.224341 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.224354 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:24Z","lastTransitionTime":"2026-01-26T16:55:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.303806 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-44x2q_0c0135f6-4074-4aab-9413-a8eb948cd566/ovnkube-controller/0.log" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.307759 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" event={"ID":"0c0135f6-4074-4aab-9413-a8eb948cd566","Type":"ContainerStarted","Data":"22f8a884a773f009ea5c522c8ecb36c28559352f219bc6d99d8351571fd007cb"} Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.308294 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.325194 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e58c71a-f1f1-421b-8e21-53ef9b51b937\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c1c8845e13a8d4b226f13897c388c5e07b2e9442c182930b42a88b2ed7b299b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a953b0ed4474ae87ed2
f4658deb39951e319c1675872e49ea28b634d3c37b8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1160d459f3a91359793f3431bd62257ad71f9446c935f43b5265443946d3e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.328211 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.328269 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.328288 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.328306 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.328319 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:24Z","lastTransitionTime":"2026-01-26T16:55:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.342112 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p7qpc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"000aa434-a8c8-4051-88f4-c50d48ce851b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4d967fccbef000c343246b0f7d1cbdd98592d6dc499d444472c20588a6e6697\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m7gvt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p7qpc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.357559 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.357652 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:55:24 crc kubenswrapper[4865]: E0126 16:55:24.357747 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.357818 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:55:24 crc kubenswrapper[4865]: E0126 16:55:24.357922 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:55:24 crc kubenswrapper[4865]: E0126 16:55:24.358022 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.358236 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:55:24 crc kubenswrapper[4865]: E0126 16:55:24.358340 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.361354 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bccdd94a-94b4-4a16-95dd-c375a34f754f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ccd48f832c010ac33cb2001b2e71aac437d4366744c03a232ff9d8dd4acea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9a
6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\
\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fsw2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.375827 4865 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.386564 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-45gx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f950c5c3-b446-44cf-9de5-dd7ff03b615f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10c92950abd6cddc41b262bd7e06b74fa47a01c012edd0bec0f5260b992ee70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwt2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-45gx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.402186 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z64kc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ddaae5-b604-4352-b756-3a4ee374b6e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b554efe88a77e7cf03fc70023bc8663bc926af4a8a95d657d5073fcc44c9f17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4131a32bf9f6e8aa00c8250af3d01f8768e1db9c93b7a25b925580f0f337c2c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z64kc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:24Z is after 2025-08-24T17:21:41Z" Jan 26 
16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.407951 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-10 13:51:50.16326651 +0000 UTC Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.418931 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bz29j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82ddf2354d7df028aad27125fddbfccd87910a5407d7d63ff39865dd9cedcaa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-79bqr\\\",\\\"readOnly\\\":tru
e,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bz29j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.431694 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.431743 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.431754 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.431775 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.431789 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:24Z","lastTransitionTime":"2026-01-26T16:55:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.433496 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-wx7wp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ebe888-610a-47c4-b256-3ddbf03f83b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:18Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-wx7wp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.448603 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.466441 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b2cd44e037e42e82e40d3b3a2bc4e00c18804301aa567cf5bf5c7c25ef536be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.483906 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ddedab5-2528-4881-9251-9ba5334aea61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9a5f5b650bd1b1e2875d8767634d7ac74fd1c90ea76f84e65f5e5078828be62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79930d9ec5ae2071a02e2417508458bd2f1b5693205ae0ad3ee46c5a3a31493b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q8cb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.499507 4865 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.522298 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c0135f6-4074-4aab-9413-a8eb948cd566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22f8a884a773f009ea5c522c8ecb36c28559352f219bc6d99d8351571fd007cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6617fec99a4214fd209e7909fef99f9b57cfe3163afb20fc16be308d253066f9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T16:55:22Z\\\",\\\"message\\\":\\\"oval\\\\nI0126 16:55:22.075414 6098 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0126 16:55:22.075439 6098 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0126 16:55:22.075489 6098 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0126 16:55:22.075500 6098 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0126 16:55:22.075540 6098 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0126 16:55:22.075552 6098 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0126 16:55:22.075571 6098 factory.go:656] Stopping watch factory\\\\nI0126 16:55:22.075597 6098 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0126 16:55:22.075612 6098 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0126 16:55:22.075630 6098 handler.go:208] Removed *v1.Node event handler 2\\\\nI0126 16:55:22.075640 6098 handler.go:208] Removed *v1.Node event handler 7\\\\nI0126 16:55:22.075653 6098 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0126 16:55:22.075656 6098 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0126 16:55:22.075712 6098 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0126 16:55:22.075727 6098 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0126 16:55:22.075738 6098 handler.go:208] Removed *v1.EgressFirewall 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-44x2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.536438 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.536497 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.536511 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.536533 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.536549 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:24Z","lastTransitionTime":"2026-01-26T16:55:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.538045 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5610d76387528c6d73e03566f9acf4b744b79f4a39119d35d23b8b2dcb385f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.556236 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44c06740a3b924e4b54afda160e7e790e2bec67ca8852b703887591802f029af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.570343 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.584503 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z64kc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ddaae5-b604-4352-b756-3a4ee374b6e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b554efe88a77e7cf03fc70023bc8663bc926af4a8a95d657d5073fcc44c9f17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4131a32bf9f6e8aa00c8250af3d01f8768e1db9c93b7a25b925580f0f337c2c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z64kc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:24Z is after 2025-08-24T17:21:41Z" Jan 26 
16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.598968 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.613691 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-45gx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f950c5c3-b446-44cf-9de5-dd7ff03b615f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10c92950abd6cddc41b262bd7e06b74fa47a01c012edd0bec0f5260b992ee70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwt2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-45gx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.628165 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b2cd44e037e42e82e40d3b3a2bc4e00c18804301aa567cf5bf5c7c25ef536be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.639561 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.639633 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.639653 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.639676 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.639692 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:24Z","lastTransitionTime":"2026-01-26T16:55:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.641844 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ddedab5-2528-4881-9251-9ba5334aea61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9a5f5b650bd1b1e2875d8767634d7ac74fd1c90ea76f84e65f5e5078828be62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79930d9ec5ae2071a02e2417508458bd2f1b5693205ae0ad3ee46c5a3a31493b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q8cb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.658538 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bz29j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82ddf2354d7df028aad27125fddbfccd87910a5407d7d63ff39865dd9cedcaa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-79bqr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bz29j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.670655 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-wx7wp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ebe888-610a-47c4-b256-3ddbf03f83b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:18Z\\\"}}\" for pod 
\"openshift-multus\"/\"network-metrics-daemon-wx7wp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.686714 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.707052 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44c06740a3b924e4b54afda160e7e790e2bec67ca8852b703887591802f029af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.725635 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.743772 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.743811 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.743820 4865 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.743838 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.743849 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:24Z","lastTransitionTime":"2026-01-26T16:55:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.759170 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.781072 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c0135f6-4074-4aab-9413-a8eb948cd566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22f8a884a773f009ea5c522c8ecb36c28559352f
219bc6d99d8351571fd007cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6617fec99a4214fd209e7909fef99f9b57cfe3163afb20fc16be308d253066f9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T16:55:22Z\\\",\\\"message\\\":\\\"oval\\\\nI0126 16:55:22.075414 6098 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0126 16:55:22.075439 6098 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0126 16:55:22.075489 6098 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0126 16:55:22.075500 6098 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0126 16:55:22.075540 6098 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0126 16:55:22.075552 6098 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0126 16:55:22.075571 6098 factory.go:656] Stopping watch factory\\\\nI0126 16:55:22.075597 6098 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0126 16:55:22.075612 6098 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0126 16:55:22.075630 6098 handler.go:208] Removed *v1.Node event handler 2\\\\nI0126 16:55:22.075640 6098 handler.go:208] Removed *v1.Node event handler 7\\\\nI0126 16:55:22.075653 6098 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0126 16:55:22.075656 6098 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0126 16:55:22.075712 6098 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0126 16:55:22.075727 6098 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0126 16:55:22.075738 6098 handler.go:208] Removed *v1.EgressFirewall 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-44x2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.799178 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5610d76387528c6d73e03566f9acf4b744b79f4a39119d35d23b8b2dcb385f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.813318 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p7qpc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"000aa434-a8c8-4051-88f4-c50d48ce851b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4d967fccbef000c343246b0f7d1cbdd98592d6dc499d444472c20588a6e6697\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m7gvt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p7qpc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.832093 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bccdd94a-94b4-4a16-95dd-c375a34f754f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ccd48f832c010ac33cb2001b2e71aac437d4366744c03a232ff9d8dd4acea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fsw2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.846499 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.846563 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:24 crc 
kubenswrapper[4865]: I0126 16:55:24.846577 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.846599 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.846614 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:24Z","lastTransitionTime":"2026-01-26T16:55:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.848431 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e58c71a-f1f1-421b-8e21-53ef9b51b937\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c1c8845e13a8d4b226f13897c388c5e07b2e9442c182930b42a88b2ed7b299b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"con
tainerID\\\":\\\"cri-o://1a953b0ed4474ae87ed2f4658deb39951e319c1675872e49ea28b634d3c37b8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1160d459f3a91359793f3431bd62257ad71f9446c935f43b5265443946d3e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.950119 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.950186 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.950200 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.950223 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:24 crc kubenswrapper[4865]: I0126 16:55:24.950239 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:24Z","lastTransitionTime":"2026-01-26T16:55:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.053885 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.053935 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.053947 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.053968 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.053983 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:25Z","lastTransitionTime":"2026-01-26T16:55:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.157437 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.157502 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.157511 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.157531 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.157543 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:25Z","lastTransitionTime":"2026-01-26T16:55:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.262176 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.262283 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.262302 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.262326 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.262339 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:25Z","lastTransitionTime":"2026-01-26T16:55:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.313619 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-44x2q_0c0135f6-4074-4aab-9413-a8eb948cd566/ovnkube-controller/1.log" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.314294 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-44x2q_0c0135f6-4074-4aab-9413-a8eb948cd566/ovnkube-controller/0.log" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.317626 4865 generic.go:334] "Generic (PLEG): container finished" podID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerID="22f8a884a773f009ea5c522c8ecb36c28559352f219bc6d99d8351571fd007cb" exitCode=1 Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.317684 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" event={"ID":"0c0135f6-4074-4aab-9413-a8eb948cd566","Type":"ContainerDied","Data":"22f8a884a773f009ea5c522c8ecb36c28559352f219bc6d99d8351571fd007cb"} Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.317778 4865 scope.go:117] "RemoveContainer" containerID="6617fec99a4214fd209e7909fef99f9b57cfe3163afb20fc16be308d253066f9" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.318423 4865 scope.go:117] "RemoveContainer" containerID="22f8a884a773f009ea5c522c8ecb36c28559352f219bc6d99d8351571fd007cb" Jan 26 16:55:25 crc kubenswrapper[4865]: E0126 16:55:25.318614 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-44x2q_openshift-ovn-kubernetes(0c0135f6-4074-4aab-9413-a8eb948cd566)\"" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.364755 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ddedab5-2528-4881-9251-9ba5334aea61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9a5f5b650bd1b1e2875d8767634d7ac74fd1c90ea76f84e65f5e5078828be62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79930d9ec5ae2071a02e2417508458bd2f1b5693205ae0ad3ee46c5a3a31493b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q8cb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:25Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.366073 4865 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.366129 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.366143 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.366164 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.366175 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:25Z","lastTransitionTime":"2026-01-26T16:55:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.384516 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bz29j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82ddf2354d7df028aad27125fddbfccd87910a5407d7d63ff39865dd9cedcaa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin
\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-79bqr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bz29j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:25Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.400176 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-wx7wp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ebe888-610a-47c4-b256-3ddbf03f83b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:18Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-wx7wp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:25Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.409122 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-07 23:30:36.960963116 +0000 UTC Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.417078 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:25Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.434541 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b2cd44e037e42e82e40d3b3a2bc4e00c18804301aa567cf5bf5c7c25ef536be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:25Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.451328 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:25Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.466931 
4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:25Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.469849 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.469926 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.470038 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.470068 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.470086 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:25Z","lastTransitionTime":"2026-01-26T16:55:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns 
error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.489973 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c0135f6-4074-4aab-9413-a8eb948cd566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773
257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22f8a884a773f009ea5c522c8ecb36c28559352f219bc6d99d8351571fd007cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6617fec99a4214fd209e7909fef99f9b57cfe3163afb20fc16be308d253066f9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T16:55:22Z\\\",\\\"message\\\":\\\"oval\\\\nI0126 16:55:22.075414 6098 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0126 16:55:22.075439 6098 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0126 16:55:22.075489 6098 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0126 16:55:22.075500 6098 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0126 16:55:22.075540 6098 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0126 16:55:22.075552 6098 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0126 16:55:22.075571 6098 factory.go:656] Stopping watch factory\\\\nI0126 16:55:22.075597 6098 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0126 16:55:22.075612 6098 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0126 16:55:22.075630 6098 handler.go:208] Removed *v1.Node event handler 2\\\\nI0126 16:55:22.075640 6098 handler.go:208] Removed *v1.Node event handler 7\\\\nI0126 16:55:22.075653 6098 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0126 16:55:22.075656 6098 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0126 16:55:22.075712 6098 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0126 16:55:22.075727 6098 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0126 16:55:22.075738 6098 handler.go:208] Removed *v1.EgressFirewall 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22f8a884a773f009ea5c522c8ecb36c28559352f219bc6d99d8351571fd007cb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T16:55:24Z\\\",\\\"message\\\":\\\"Informer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:24Z is after 2025-08-24T17:21:41Z]\\\\nI0126 16:55:24.318522 6347 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cluster\\\\\\\", UUID:\\\\\\\"7715118b-bb1b-400a-803e-7ab2cc3eeec0\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress-canary/ingress-canary\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: 
[]services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cl\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",
\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-44x2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:25Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.508079 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5610d76387528c6d73e03566f9acf4b744b79f4a39119d35d23b8b2dcb385f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:25Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.522401 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44c06740a3b924e4b54afda160e7e790e2bec67ca8852b703887591802f029af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:25Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.538128 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bccdd94a-94b4-4a16-95dd-c375a34f754f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ccd48f832c010ac33cb2001b2e71aac437d4366744c03a232ff9d8dd4acea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fsw2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:25Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.553421 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e58c71a-f1f1-421b-8e21-53ef9b51b937\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c1c8845e13a8d4b226f13897c388c5e07b2e9442c182930b42a88b2ed7b299b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a953b0ed4474ae87ed2f4658deb39951e319c1675872e49ea28b634d3c37b8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1160d459f3a91359793f3431bd62257ad71f9446c935f43b5265443946d3e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:25Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.571339 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p7qpc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"000aa434-a8c8-4051-88f4-c50d48ce851b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4d967fccbef000c343246b0f7d1cbdd98592d6dc499d444472c20588a6e6697\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m7gvt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p7qpc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:25Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.576202 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.576269 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.576283 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.576314 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.576326 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:25Z","lastTransitionTime":"2026-01-26T16:55:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.594532 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:25Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.606814 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-45gx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f950c5c3-b446-44cf-9de5-dd7ff03b615f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10c92950abd6cddc41b262bd7e06b74fa47a01c012edd0bec0f5260b992ee70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwt2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-45gx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:25Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.621056 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z64kc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ddaae5-b604-4352-b756-3a4ee374b6e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b554efe88a77e7cf03fc70023bc8663bc926af4a8a95d657d5073fcc44c9f17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4131a32bf9f6e8aa00c8250af3d01f8768e1db9c93b7a25b925580f0f337c2c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:17Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z64kc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:25Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.679552 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.679649 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.679664 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.679687 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.679705 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:25Z","lastTransitionTime":"2026-01-26T16:55:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.782163 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.782223 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.782238 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.782257 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.782268 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:25Z","lastTransitionTime":"2026-01-26T16:55:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.885440 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.885499 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.885509 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.885530 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.885543 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:25Z","lastTransitionTime":"2026-01-26T16:55:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.988707 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.989261 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.989274 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.989300 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:25 crc kubenswrapper[4865]: I0126 16:55:25.989312 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:25Z","lastTransitionTime":"2026-01-26T16:55:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.092531 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.092593 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.092607 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.092629 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.092641 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:26Z","lastTransitionTime":"2026-01-26T16:55:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.195792 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.195852 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.195866 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.195886 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.195899 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:26Z","lastTransitionTime":"2026-01-26T16:55:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.299262 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.299308 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.299362 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.299383 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.299395 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:26Z","lastTransitionTime":"2026-01-26T16:55:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.324408 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-44x2q_0c0135f6-4074-4aab-9413-a8eb948cd566/ovnkube-controller/1.log" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.330238 4865 scope.go:117] "RemoveContainer" containerID="22f8a884a773f009ea5c522c8ecb36c28559352f219bc6d99d8351571fd007cb" Jan 26 16:55:26 crc kubenswrapper[4865]: E0126 16:55:26.330445 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-44x2q_openshift-ovn-kubernetes(0c0135f6-4074-4aab-9413-a8eb948cd566)\"" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.347484 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5610d76387528c6d73e03566f9acf4b744b79f4a39119d35d23b8b2dcb385f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:26Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.357100 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.357212 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:55:26 crc kubenswrapper[4865]: E0126 16:55:26.357271 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.357306 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:55:26 crc kubenswrapper[4865]: E0126 16:55:26.357384 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9" Jan 26 16:55:26 crc kubenswrapper[4865]: E0126 16:55:26.357459 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.357481 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:55:26 crc kubenswrapper[4865]: E0126 16:55:26.357827 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.365254 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44c06740a3b924e4b54afda160e7e790e2bec67ca8852b703887591802f029af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:26Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.385212 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:26Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.403292 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.403354 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.403370 4865 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.403389 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.403401 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:26Z","lastTransitionTime":"2026-01-26T16:55:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.405250 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:26Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.409477 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-09 00:35:55.575712499 +0000 UTC Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.432440 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c0135f6-4074-4aab-9413-a8eb948cd566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22f8a884a773f009ea5c522c8ecb36c28559352f
219bc6d99d8351571fd007cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22f8a884a773f009ea5c522c8ecb36c28559352f219bc6d99d8351571fd007cb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T16:55:24Z\\\",\\\"message\\\":\\\"Informer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:24Z is after 2025-08-24T17:21:41Z]\\\\nI0126 16:55:24.318522 6347 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cluster\\\\\\\", UUID:\\\\\\\"7715118b-bb1b-400a-803e-7ab2cc3eeec0\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress-canary/ingress-canary\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cl\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:23Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-44x2q_openshift-ovn-kubernetes(0c0135f6-4074-4aab-9413-a8eb948cd566)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-44x2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:26Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.450279 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e58c71a-f1f1-421b-8e21-53ef9b51b937\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c1c8845e13a8d4b226f13897c388c5e07b2e9442c182930b42a88b2ed7b299b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a953b0ed4474ae87ed2f4658deb39951e319c1675872e49ea28b634d3c37b8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1160d459f3a91359793f3431bd62257ad71f9446c935f43b5265443946d3e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:26Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.465216 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p7qpc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"000aa434-a8c8-4051-88f4-c50d48ce851b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4d967fccbef000c343246b0f7d1cbdd98592d6dc499d444472c20588a6e6697\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m7gvt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p7qpc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:26Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.484399 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bccdd94a-94b4-4a16-95dd-c375a34f754f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ccd48f832c010ac33cb2001b2e71aac437d4366744c03a232ff9d8dd4acea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fsw2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:26Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.497847 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:26Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.505650 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.505692 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.505703 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.505721 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.505734 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:26Z","lastTransitionTime":"2026-01-26T16:55:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.513464 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-45gx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f950c5c3-b446-44cf-9de5-dd7ff03b615f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10c92950abd6cddc41b262bd7e06b74fa47a01c012edd0bec0f5260b992ee70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwt2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-45gx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:26Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.527297 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z64kc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ddaae5-b604-4352-b756-3a4ee374b6e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b554efe88a77e7cf03fc70023bc8663bc926af4a8a95d657d5073fcc44c9f17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4131a32bf9f6e8aa00c8250af3d01f8768e1db9c93b7a25b925580f0f337c2c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z64kc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:26Z is after 2025-08-24T17:21:41Z" Jan 26 
16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.542223 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:26Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.556686 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b2cd44e037e42e82e40d3b3a2bc4e00c18804301aa567cf5bf5c7c25ef536be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:26Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.571684 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ddedab5-2528-4881-9251-9ba5334aea61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9a5f5b650bd1b1e2875d8767634d7ac74fd1c90ea76f84e65f5e5078828be62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79930d9ec5ae2071a02e2417508458bd2f1b5693205ae0ad3ee46c5a3a31493b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q8cb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:26Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.587261 4865 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-bz29j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82ddf2354d7df028aad27125fddbfccd87910a5407d7d63ff39865dd9cedcaa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-79bqr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-bz29j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:26Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.600091 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-wx7wp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ebe888-610a-47c4-b256-3ddbf03f83b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:18Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-wx7wp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2026-01-26T16:55:26Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.608216 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.608261 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.608272 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.608289 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.608307 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:26Z","lastTransitionTime":"2026-01-26T16:55:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.677330 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/27ebe888-610a-47c4-b256-3ddbf03f83b9-metrics-certs\") pod \"network-metrics-daemon-wx7wp\" (UID: \"27ebe888-610a-47c4-b256-3ddbf03f83b9\") " pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:55:26 crc kubenswrapper[4865]: E0126 16:55:26.677590 4865 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 26 16:55:26 crc kubenswrapper[4865]: E0126 16:55:26.677738 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/27ebe888-610a-47c4-b256-3ddbf03f83b9-metrics-certs podName:27ebe888-610a-47c4-b256-3ddbf03f83b9 nodeName:}" failed. No retries permitted until 2026-01-26 16:55:34.677694213 +0000 UTC m=+62.261579970 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/27ebe888-610a-47c4-b256-3ddbf03f83b9-metrics-certs") pod "network-metrics-daemon-wx7wp" (UID: "27ebe888-610a-47c4-b256-3ddbf03f83b9") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.711085 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.711139 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.711151 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.711167 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.711180 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:26Z","lastTransitionTime":"2026-01-26T16:55:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.814069 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.814128 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.814147 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.814169 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.814182 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:26Z","lastTransitionTime":"2026-01-26T16:55:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.917722 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.917796 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.917811 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.917836 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:26 crc kubenswrapper[4865]: I0126 16:55:26.917849 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:26Z","lastTransitionTime":"2026-01-26T16:55:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.022156 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.022218 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.022230 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.022251 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.022262 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:27Z","lastTransitionTime":"2026-01-26T16:55:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.126035 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.126086 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.126096 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.126115 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.126126 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:27Z","lastTransitionTime":"2026-01-26T16:55:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.229016 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.229066 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.229078 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.229095 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.229107 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:27Z","lastTransitionTime":"2026-01-26T16:55:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.332396 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.332743 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.332847 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.332954 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.333069 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:27Z","lastTransitionTime":"2026-01-26T16:55:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.410397 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 20:29:07.333231432 +0000 UTC Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.435840 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.436184 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.436299 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.436407 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.436762 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:27Z","lastTransitionTime":"2026-01-26T16:55:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.539513 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.539582 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.539602 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.539623 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.539637 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:27Z","lastTransitionTime":"2026-01-26T16:55:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.642499 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.642547 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.642558 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.642574 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.642585 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:27Z","lastTransitionTime":"2026-01-26T16:55:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.745185 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.745230 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.745251 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.745273 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.745287 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:27Z","lastTransitionTime":"2026-01-26T16:55:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.847739 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.847808 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.847818 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.847838 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.847852 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:27Z","lastTransitionTime":"2026-01-26T16:55:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.951096 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.951143 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.951155 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.951173 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:27 crc kubenswrapper[4865]: I0126 16:55:27.951185 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:27Z","lastTransitionTime":"2026-01-26T16:55:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.053652 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.053700 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.053708 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.053721 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.053731 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:28Z","lastTransitionTime":"2026-01-26T16:55:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.156685 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.156757 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.156768 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.156784 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.156796 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:28Z","lastTransitionTime":"2026-01-26T16:55:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.261937 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.262015 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.262034 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.262054 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.262106 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:28Z","lastTransitionTime":"2026-01-26T16:55:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.357053 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.357053 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:55:28 crc kubenswrapper[4865]: E0126 16:55:28.357281 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9" Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.357053 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:55:28 crc kubenswrapper[4865]: E0126 16:55:28.357359 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.357093 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:55:28 crc kubenswrapper[4865]: E0126 16:55:28.357541 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:55:28 crc kubenswrapper[4865]: E0126 16:55:28.357640 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.365577 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.365625 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.365639 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.365659 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.365670 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:28Z","lastTransitionTime":"2026-01-26T16:55:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.410966 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 22:37:25.86116898 +0000 UTC Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.469078 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.469136 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.469144 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.469159 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.469169 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:28Z","lastTransitionTime":"2026-01-26T16:55:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.573906 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.573964 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.573974 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.574010 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.574022 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:28Z","lastTransitionTime":"2026-01-26T16:55:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.677575 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.677638 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.677649 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.677670 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.677684 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:28Z","lastTransitionTime":"2026-01-26T16:55:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.780266 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.780311 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.780320 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.780342 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.780358 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:28Z","lastTransitionTime":"2026-01-26T16:55:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.884545 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.884620 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.884637 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.884658 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.884698 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:28Z","lastTransitionTime":"2026-01-26T16:55:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.987860 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.987909 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.987917 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.987936 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:28 crc kubenswrapper[4865]: I0126 16:55:28.987951 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:28Z","lastTransitionTime":"2026-01-26T16:55:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.090932 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.091057 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.091085 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.091109 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.091124 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:29Z","lastTransitionTime":"2026-01-26T16:55:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.194152 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.194237 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.194254 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.194275 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.194290 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:29Z","lastTransitionTime":"2026-01-26T16:55:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.200069 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.200119 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.200131 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.200147 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.200160 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:29Z","lastTransitionTime":"2026-01-26T16:55:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:29 crc kubenswrapper[4865]: E0126 16:55:29.215509 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ac1a731-046d-4d8f-8161-6e9e491f5dac\\\",\\\"systemUUID\\\":\\\"35d9e2a2-68c2-48cb-856f-00ba3eb74617\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:29Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.220771 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.220834 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.220853 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.220881 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.220901 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:29Z","lastTransitionTime":"2026-01-26T16:55:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.221414 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.232897 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Jan 26 16:55:29 crc kubenswrapper[4865]: E0126 16:55:29.237699 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ac1a731-046d-4d8f-8161-6e9e491f5dac\\\",\\\"systemUUID\\\":\\\"35d9e2a2-68c2-48cb-856f-00ba3eb74617\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:29Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.238657 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:29Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.242730 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.242768 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.242781 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.242803 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.242817 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:29Z","lastTransitionTime":"2026-01-26T16:55:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.252912 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-45gx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f950c5c3-b446-44cf-9de5-dd7ff03b615f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10c92950abd6cddc41b262bd7e06b74fa47a01c012edd0bec0f5260b992ee70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwt2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-45gx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:29Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:29 crc kubenswrapper[4865]: E0126 16:55:29.255596 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient 
memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\
\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\
":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ac1a731-046d-4d8f-8161-6e9e491f5dac\\\",\\\"systemUUID\\\":\\\"35d9e2a2-68c2-48cb-856f-00ba3eb74617\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:29Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.261470 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.261546 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.261559 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.261584 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.261605 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:29Z","lastTransitionTime":"2026-01-26T16:55:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.270958 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z64kc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ddaae5-b604-4352-b756-3a4ee374b6e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b554efe88a77e7cf03fc70023bc8663bc926af4a8a95d657d5073fcc44c9f17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4131a32bf9f6e8aa00c8250af3d01f8768e1db9c93b7a25b925580f0f337c2c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z64kc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:29Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:29 crc kubenswrapper[4865]: E0126 16:55:29.276261 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ac1a731-046d-4d8f-8161-6e9e491f5dac\\\",\\\"systemUUID\\\":\\\"35d9e2a2-68c2-48cb-856f-00ba3eb74617\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:29Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.280634 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.280679 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.280688 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.280707 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.280718 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:29Z","lastTransitionTime":"2026-01-26T16:55:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.286842 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:29Z is after 2025-08-24T17:21:41Z"
Jan 26 16:55:29 crc kubenswrapper[4865]: E0126 16:55:29.293189 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status ... [node status patch payload identical to the 16:55:29.237699 retry above] ... for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:29Z is after 2025-08-24T17:21:41Z"
Jan 26 16:55:29 crc kubenswrapper[4865]: E0126 16:55:29.293337 4865 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.297079 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasSufficientMemory" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.297125 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.297140 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.297160 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.297175 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:29Z","lastTransitionTime":"2026-01-26T16:55:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.299776 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b2cd44e037e42e82e40d3b3a2bc4e00c18804301aa567cf5bf5c7c25ef536be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:29Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.313142 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ddedab5-2528-4881-9251-9ba5334aea61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9a5f5b650bd1b1e2875d8767634d7ac74fd1c90ea76f84e65f5e5078828be62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79930d9ec5ae2071a02e2417508458bd2f1b5693205ae0ad3ee46c5a3a31493b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q8cb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:29Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.326524 4865 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-multus/multus-bz29j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82ddf2354d7df028aad27125fddbfccd87910a5407d7d63ff39865dd9cedcaa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-79bqr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-bz29j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:29Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.339407 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-wx7wp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ebe888-610a-47c4-b256-3ddbf03f83b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:18Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-wx7wp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2026-01-26T16:55:29Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.353813 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\
"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5610d76387528c6d73e03566f9acf4b744b79f4a39119d35d23b8b2dcb385f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:29Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.368800 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44c06740a3b924e4b54afda160e7e790e2bec67ca8852b703887591802f029af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:29Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.380828 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:29Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.392682 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:29Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.400071 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.400118 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.400134 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.400157 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.400173 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:29Z","lastTransitionTime":"2026-01-26T16:55:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.411472 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-13 19:54:59.734765993 +0000 UTC Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.416203 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c0135f6-4074-4aab-9413-a8eb948cd566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\
\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\
"cri-o://7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22f8a884a773f009ea5c522c8ecb36c28559352f219bc6d99d8351571fd007cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22f8a884a773f009ea5c522c8ecb36c28559352f219bc6d99d8351571fd007cb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T16:55:24Z\\\",\\\"message\\\":\\\"Informer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:24Z is after 2025-08-24T17:21:41Z]\\\\nI0126 16:55:24.318522 6347 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cluster\\\\\\\", UUID:\\\\\\\"7715118b-bb1b-400a-803e-7ab2cc3eeec0\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress-canary/ingress-canary\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: 
[]services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cl\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:23Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-44x2q_openshift-ovn-kubernetes(0c0135f6-4074-4aab-9413-a8eb948cd566)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/service
account\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-44x2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:29Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.434142 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e58c71a-f1f1-421b-8e21-53ef9b51b937\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c1c8845e13a8d4b226f13897c388c5e07b2e9442c182930b42a88b2ed7b299b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a953b0ed4474ae87ed2f4658deb39951e319c1675872e49ea28b634d3c37b8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1160d459f3a91359793f3431bd62257ad71f9446c935f43b5265443946d3e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:29Z is after 2025-08-24T17:21:41Z"
Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.448236 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p7qpc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"000aa434-a8c8-4051-88f4-c50d48ce851b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4d967fccbef000c343246b0f7d1cbdd98592d6dc499d444472c20588a6e6697\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m7gvt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p7qpc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:29Z is after 2025-08-24T17:21:41Z"
Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.467968 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bccdd94a-94b4-4a16-95dd-c375a34f754f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ccd48f832c010ac33cb2001b2e71aac437d4366744c03a232ff9d8dd4acea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fsw2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:29Z is after 2025-08-24T17:21:41Z"
Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.502891 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.502941 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.502951 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.502968 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.502982 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:29Z","lastTransitionTime":"2026-01-26T16:55:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.605859 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.605924 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.605947 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.605970 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.605986 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:29Z","lastTransitionTime":"2026-01-26T16:55:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.710102 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.710178 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.710189 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.710207 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.710219 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:29Z","lastTransitionTime":"2026-01-26T16:55:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.813553 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.813601 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.813613 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.813629 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.813641 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:29Z","lastTransitionTime":"2026-01-26T16:55:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.916403 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.916449 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.916459 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.916475 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:29 crc kubenswrapper[4865]: I0126 16:55:29.916486 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:29Z","lastTransitionTime":"2026-01-26T16:55:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.020096 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.020161 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.020172 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.020192 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.020204 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:30Z","lastTransitionTime":"2026-01-26T16:55:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.123608 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.123683 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.123708 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.123730 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.123746 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:30Z","lastTransitionTime":"2026-01-26T16:55:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.227404 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.227501 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.227524 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.227550 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.227568 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:30Z","lastTransitionTime":"2026-01-26T16:55:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.330898 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.331017 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.331033 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.331058 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.331073 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:30Z","lastTransitionTime":"2026-01-26T16:55:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.357768 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp"
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.357975 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.358032 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.358177 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 26 16:55:30 crc kubenswrapper[4865]: E0126 16:55:30.358176 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9"
Jan 26 16:55:30 crc kubenswrapper[4865]: E0126 16:55:30.358741 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 26 16:55:30 crc kubenswrapper[4865]: E0126 16:55:30.358928 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 26 16:55:30 crc kubenswrapper[4865]: E0126 16:55:30.359103 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.412204 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 09:52:11.312235745 +0000 UTC
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.433700 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.433750 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.433760 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.433779 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.433792 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:30Z","lastTransitionTime":"2026-01-26T16:55:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.537357 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.537408 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.537425 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.537449 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.537466 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:30Z","lastTransitionTime":"2026-01-26T16:55:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.641116 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.641170 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.641179 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.641198 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.641209 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:30Z","lastTransitionTime":"2026-01-26T16:55:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.744043 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.744150 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.744168 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.744191 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.744209 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:30Z","lastTransitionTime":"2026-01-26T16:55:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.847068 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.847127 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.847141 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.847162 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.847177 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:30Z","lastTransitionTime":"2026-01-26T16:55:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.950375 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.950450 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.950465 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.950489 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:30 crc kubenswrapper[4865]: I0126 16:55:30.950503 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:30Z","lastTransitionTime":"2026-01-26T16:55:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.054130 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.054199 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.054213 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.054241 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.054255 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:31Z","lastTransitionTime":"2026-01-26T16:55:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.157288 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.157348 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.157358 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.157383 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.157396 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:31Z","lastTransitionTime":"2026-01-26T16:55:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.261059 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.261127 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.261138 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.261161 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.261180 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:31Z","lastTransitionTime":"2026-01-26T16:55:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.364140 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.364189 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.364200 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.364219 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.364232 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:31Z","lastTransitionTime":"2026-01-26T16:55:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.413392 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-17 16:24:08.23164679 +0000 UTC
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.466700 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.466773 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.466785 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.466813 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.466832 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:31Z","lastTransitionTime":"2026-01-26T16:55:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.569813 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.569872 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.569890 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.569910 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.569927 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:31Z","lastTransitionTime":"2026-01-26T16:55:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.672778 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.672820 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.672827 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.672840 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.672849 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:31Z","lastTransitionTime":"2026-01-26T16:55:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.775493 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.775529 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.775542 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.775558 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.775570 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:31Z","lastTransitionTime":"2026-01-26T16:55:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.877774 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.877814 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.877825 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.877841 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.877852 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:31Z","lastTransitionTime":"2026-01-26T16:55:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.980469 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.980517 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.980531 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.980549 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:31 crc kubenswrapper[4865]: I0126 16:55:31.980560 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:31Z","lastTransitionTime":"2026-01-26T16:55:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.083218 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.083347 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.083369 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.083397 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.083416 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:32Z","lastTransitionTime":"2026-01-26T16:55:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.186763 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.186874 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.186893 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.186916 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.186933 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:32Z","lastTransitionTime":"2026-01-26T16:55:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.289362 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.289415 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.289429 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.289447 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.289460 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:32Z","lastTransitionTime":"2026-01-26T16:55:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.357537 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.357571 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.357544 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp"
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.357640 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 26 16:55:32 crc kubenswrapper[4865]: E0126 16:55:32.357844 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 26 16:55:32 crc kubenswrapper[4865]: E0126 16:55:32.357926 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 26 16:55:32 crc kubenswrapper[4865]: E0126 16:55:32.358145 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9"
Jan 26 16:55:32 crc kubenswrapper[4865]: E0126 16:55:32.358312 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.392641 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.392689 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.392700 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.392754 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.392768 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:32Z","lastTransitionTime":"2026-01-26T16:55:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.414250 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-07 03:02:12.359453072 +0000 UTC
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.495459 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.495503 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.495515 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.495543 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.495557 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:32Z","lastTransitionTime":"2026-01-26T16:55:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.598354 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.598404 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.598420 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.598437 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.598448 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:32Z","lastTransitionTime":"2026-01-26T16:55:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.702025 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.702091 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.702112 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.702142 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.702164 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:32Z","lastTransitionTime":"2026-01-26T16:55:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.804565 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.804704 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.804716 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.804731 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.804739 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:32Z","lastTransitionTime":"2026-01-26T16:55:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.907554 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.907611 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.907631 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.907658 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:32 crc kubenswrapper[4865]: I0126 16:55:32.907674 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:32Z","lastTransitionTime":"2026-01-26T16:55:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.010490 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.010559 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.010572 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.010588 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.010603 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:33Z","lastTransitionTime":"2026-01-26T16:55:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.113833 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.113880 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.113890 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.113906 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.113919 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:33Z","lastTransitionTime":"2026-01-26T16:55:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.216668 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.216726 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.216736 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.216749 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.216759 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:33Z","lastTransitionTime":"2026-01-26T16:55:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.259229 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.271921 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b2cd44e037e42e82e40d3b3a2bc4e00c18804301aa567cf5bf5c7c25ef536be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:33Z is after 2025-08-24T17:21:41Z"
Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.285150 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ddedab5-2528-4881-9251-9ba5334aea61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9a5f5b650bd1b1e2875d8767634d7ac74fd1c90ea76f84e65f5e5078828be62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79930d9ec5ae2071a02e2417508458bd2f1b5693205ae0ad3ee46c5a3a31493b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q8cb9\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:33Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.301722 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bz29j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82ddf2354d7df028aad27125fddbfccd87910a5407d7d63ff39865dd9cedcaa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-79bqr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bz29j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:33Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.312238 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-wx7wp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ebe888-610a-47c4-b256-3ddbf03f83b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:18Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-wx7wp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:33Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.319495 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.319541 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.319553 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.319571 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.319583 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:33Z","lastTransitionTime":"2026-01-26T16:55:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.324073 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83ffee97-9e36-462b-b020-39fcf1c33c00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00669bc8bfcb8d7fa5d244f0819a79880bc778ed6aab7b68787c72b6f1709443\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98604715e78c851f959f0c229d5e36f68203dd7d1af5522d2c76c17ec168f95b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ced4687ba65f43cdb20daccd718c699d6a86a64138439b5f8fbe6bbc37b1ba3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2ec0d2b95278257555fc63611b54fc6c16f5abbb660c6dcd07c256495f67cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d2ec0d2b95278257555fc63611b54fc6c16f5abbb660c6dcd07c256495f67cf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:33Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.336529 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:33Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.353300 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44c06740a3b924e4b54afda160e7e790e2bec67ca8852b703887591802f029af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:33Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.365775 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:33Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.377597 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:33Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.394638 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c0135f6-4074-4aab-9413-a8eb948cd566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22f8a884a773f009ea5c522c8ecb36c28559352f
219bc6d99d8351571fd007cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22f8a884a773f009ea5c522c8ecb36c28559352f219bc6d99d8351571fd007cb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T16:55:24Z\\\",\\\"message\\\":\\\"Informer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:24Z is after 2025-08-24T17:21:41Z]\\\\nI0126 16:55:24.318522 6347 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cluster\\\\\\\", UUID:\\\\\\\"7715118b-bb1b-400a-803e-7ab2cc3eeec0\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress-canary/ingress-canary\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cl\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:23Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-44x2q_openshift-ovn-kubernetes(0c0135f6-4074-4aab-9413-a8eb948cd566)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-44x2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:33Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.407040 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5610d76387528c6d73e03566f9acf4b744b79f4a39119d35d23b8b2dcb385f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 
16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:33Z is after 2025-08-24T17:21:41Z" 
Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.415131 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 01:01:52.859686176 +0000 UTC
Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.415984 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p7qpc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"000aa434-a8c8-4051-88f4-c50d48ce851b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4d967fccbef000c343246b0f7d1cbdd98592d6dc499d444472c20588a6e6697\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m7gvt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p7qpc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:33Z is after 2025-08-24T17:21:41Z"
Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.422964 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.423039 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.423053 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.423071 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.423108 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:33Z","lastTransitionTime":"2026-01-26T16:55:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.433747 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bccdd94a-94b4-4a16-95dd-c375a34f754f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ccd48f832c010ac33cb2001b2e71aac437d4366744c03a232ff9d8dd4acea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\
\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\
\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-additional-cni-plugins-6fsw2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:33Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.446107 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e58c71a-f1f1-421b-8e21-53ef9b51b937\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c1c8845e13a8d4b226f13897c388c5e07b2e9442c182930b42a88b2ed7b299b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a953b0ed4474ae87ed2f4658deb39951e319c1675872e49ea28b634d3c37b8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\
":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1160d459f3a91359793f3431bd62257ad71f9446c935f43b5265443946d3e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:33Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.456818 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z64kc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ddaae5-b604-4352-b756-3a4ee374b6e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b554efe88a77e7cf03fc70023bc8663bc926af4a8a95d657d5073fcc44c9f17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4131a32bf9f6e8aa00c8250af3d01f8768e1db9c93b7a25b925580f0f337c2c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z64kc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:33Z is after 2025-08-24T17:21:41Z" Jan 26 
16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.471533 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:33Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.483367 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-45gx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f950c5c3-b446-44cf-9de5-dd7ff03b615f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10c92950abd6cddc41b262bd7e06b74fa47a01c012edd0bec0f5260b992ee70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwt2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-45gx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:33Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.525536 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.525601 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.525616 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.525637 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.525648 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:33Z","lastTransitionTime":"2026-01-26T16:55:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.629282 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.629335 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.629348 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.629368 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.629383 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:33Z","lastTransitionTime":"2026-01-26T16:55:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.732662 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.732705 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.732716 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.732736 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.732749 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:33Z","lastTransitionTime":"2026-01-26T16:55:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.835692 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.835747 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.835760 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.835780 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.835796 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:33Z","lastTransitionTime":"2026-01-26T16:55:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.938812 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.938848 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.938861 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.938877 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:33 crc kubenswrapper[4865]: I0126 16:55:33.938886 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:33Z","lastTransitionTime":"2026-01-26T16:55:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.042091 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.042178 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.042202 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.042231 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.042252 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:34Z","lastTransitionTime":"2026-01-26T16:55:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.144216 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.144281 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.144293 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.144309 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.144321 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:34Z","lastTransitionTime":"2026-01-26T16:55:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.163851 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:55:34 crc kubenswrapper[4865]: E0126 16:55:34.164165 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:56:06.164086884 +0000 UTC m=+93.747972481 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.250119 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.250159 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.250170 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.250187 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.250198 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:34Z","lastTransitionTime":"2026-01-26T16:55:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.265025 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.265088 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.265117 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.265161 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 26 16:55:34 crc kubenswrapper[4865]: E0126 16:55:34.265218 4865 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Jan 26 16:55:34 crc kubenswrapper[4865]: E0126 16:55:34.265246 4865 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Jan 26 16:55:34 crc kubenswrapper[4865]: E0126 16:55:34.265248 4865 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Jan 26 16:55:34 crc kubenswrapper[4865]: E0126 16:55:34.265272 4865 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Jan 26 16:55:34 crc kubenswrapper[4865]: E0126 16:55:34.265286 4865 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 26 16:55:34 crc kubenswrapper[4865]: E0126 16:55:34.265305 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 16:56:06.265289038 +0000 UTC m=+93.849174625 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Jan 26 16:55:34 crc kubenswrapper[4865]: E0126 16:55:34.265323 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-26 16:56:06.265315669 +0000 UTC m=+93.849201256 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 26 16:55:34 crc kubenswrapper[4865]: E0126 16:55:34.265336 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 16:56:06.265330329 +0000 UTC m=+93.849215916 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Jan 26 16:55:34 crc kubenswrapper[4865]: E0126 16:55:34.265354 4865 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Jan 26 16:55:34 crc kubenswrapper[4865]: E0126 16:55:34.265391 4865 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Jan 26 16:55:34 crc kubenswrapper[4865]: E0126 16:55:34.265406 4865 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 26 16:55:34 crc kubenswrapper[4865]: E0126 16:55:34.265475 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-26 16:56:06.265453232 +0000 UTC m=+93.849338879 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.352555 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.352598 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.352619 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.352640 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.352653 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:34Z","lastTransitionTime":"2026-01-26T16:55:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.356895 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.356969 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 26 16:55:34 crc kubenswrapper[4865]: E0126 16:55:34.357193 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 26 16:55:34 crc kubenswrapper[4865]: E0126 16:55:34.357319 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.357820 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp"
Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.357959 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 26 16:55:34 crc kubenswrapper[4865]: E0126 16:55:34.358097 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9"
Jan 26 16:55:34 crc kubenswrapper[4865]: E0126 16:55:34.358155 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.369202 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:34Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.380702 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-45gx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f950c5c3-b446-44cf-9de5-dd7ff03b615f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10c92950abd6cddc41b262bd7e06b74fa47a01c012edd0bec0f5260b992ee70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwt2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-45gx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:34Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.394789 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z64kc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ddaae5-b604-4352-b756-3a4ee374b6e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b554efe88a77e7cf03fc70023bc8663bc926af4a8a95d657d5073fcc44c9f17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4131a32bf9f6e8aa00c8250af3d01f8768e1db9c93b7a25b925580f0f337c2c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z64kc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:34Z is after 2025-08-24T17:21:41Z" Jan 26 
16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.407201 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ddedab5-2528-4881-9251-9ba5334aea61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9a5f5b650bd1b1e2875d8767634d7ac74fd1c90ea76f84e65f5e5078828be62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79930d9ec5ae2071a02e2417508458bd2f1b5693205ae0ad3ee46c5a3a31493b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q8cb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2026-01-26T16:55:34Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.415424 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 15:28:34.093291724 +0000 UTC Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.420897 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bz29j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82ddf2354d7df028aad27125fddbfccd87910a5407d7d63ff39865dd9cedcaa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\"
:\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-79bqr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bz29j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:34Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.433031 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-wx7wp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ebe888-610a-47c4-b256-3ddbf03f83b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:18Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-wx7wp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:34Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.447452 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83ffee97-9e36-462b-b020-39fcf1c33c00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00669bc8bfcb8d7fa5d244f0819a79880bc778ed6aab7b68787c72b6f1709443\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98604715e78c851f959f0c229d5e36f68203dd7d1af5522d2c76c17ec168f95b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ced4687ba65f43cdb20daccd718c699d6a86a64138439b5f8fbe6bbc37b1ba3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2ec0d2b95278257555fc63611b54fc6c16f5abbb660c6dcd07c256495f67cf\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d2ec0d2b95278257555fc63611b54fc6c16f5abbb660c6dcd07c256495f67cf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:34Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.456169 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.456208 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.456219 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.456233 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.456242 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:34Z","lastTransitionTime":"2026-01-26T16:55:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.459429 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:34Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.472813 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b2cd44e037e42e82e40d3b3a2bc4e00c18804301aa567cf5bf5c7c25ef536be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:34Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.485648 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:34Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.497111 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:34Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.515925 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c0135f6-4074-4aab-9413-a8eb948cd566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22f8a884a773f009ea5c522c8ecb36c28559352f
219bc6d99d8351571fd007cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22f8a884a773f009ea5c522c8ecb36c28559352f219bc6d99d8351571fd007cb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T16:55:24Z\\\",\\\"message\\\":\\\"Informer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:24Z is after 2025-08-24T17:21:41Z]\\\\nI0126 16:55:24.318522 6347 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cluster\\\\\\\", UUID:\\\\\\\"7715118b-bb1b-400a-803e-7ab2cc3eeec0\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress-canary/ingress-canary\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cl\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:23Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-44x2q_openshift-ovn-kubernetes(0c0135f6-4074-4aab-9413-a8eb948cd566)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-44x2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:34Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.531196 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5610d76387528c6d73e03566f9acf4b744b79f4a39119d35d23b8b2dcb385f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 
16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:34Z is after 2025-08-24T17:21:41Z" 
Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.546397 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44c06740a3b924e4b54afda160e7e790e2bec67ca8852b703887591802f029af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:34Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.561527 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bccdd94a-94b4-4a16-95dd-c375a34f754f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ccd48f832c010ac33cb2001b2e71aac437d4366744c03a232ff9d8dd4acea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fsw2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:34Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.563618 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.563778 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:34 crc 
kubenswrapper[4865]: I0126 16:55:34.563868 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.563979 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.564105 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:34Z","lastTransitionTime":"2026-01-26T16:55:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.572775 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e58c71a-f1f1-421b-8e21-53ef9b51b937\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c1c8845e13a8d4b226f13897c388c5e07b2e9442c182930b42a88b2ed7b299b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"con
tainerID\\\":\\\"cri-o://1a953b0ed4474ae87ed2f4658deb39951e319c1675872e49ea28b634d3c37b8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1160d459f3a91359793f3431bd62257ad71f9446c935f43b5265443946d3e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:34Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.583035 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p7qpc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"000aa434-a8c8-4051-88f4-c50d48ce851b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4d967fccbef000c343246b0f7d1cbdd98592d6dc499d444472c20588a6e6697\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m7gvt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p7qpc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:34Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.667330 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.667836 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.667918 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.667988 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.668085 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:34Z","lastTransitionTime":"2026-01-26T16:55:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.768834 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/27ebe888-610a-47c4-b256-3ddbf03f83b9-metrics-certs\") pod \"network-metrics-daemon-wx7wp\" (UID: \"27ebe888-610a-47c4-b256-3ddbf03f83b9\") " pod="openshift-multus/network-metrics-daemon-wx7wp"
Jan 26 16:55:34 crc kubenswrapper[4865]: E0126 16:55:34.769054 4865 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 26 16:55:34 crc kubenswrapper[4865]: E0126 16:55:34.769140 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/27ebe888-610a-47c4-b256-3ddbf03f83b9-metrics-certs podName:27ebe888-610a-47c4-b256-3ddbf03f83b9 nodeName:}" failed. No retries permitted until 2026-01-26 16:55:50.769111717 +0000 UTC m=+78.352997304 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/27ebe888-610a-47c4-b256-3ddbf03f83b9-metrics-certs") pod "network-metrics-daemon-wx7wp" (UID: "27ebe888-610a-47c4-b256-3ddbf03f83b9") : object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.771089 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.771212 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.771284 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.771356 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.771437 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:34Z","lastTransitionTime":"2026-01-26T16:55:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.874340 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.874376 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.874388 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.874405 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.874419 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:34Z","lastTransitionTime":"2026-01-26T16:55:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.976801 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.977177 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.977250 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.977323 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:34 crc kubenswrapper[4865]: I0126 16:55:34.977382 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:34Z","lastTransitionTime":"2026-01-26T16:55:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:35 crc kubenswrapper[4865]: I0126 16:55:35.080064 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:35 crc kubenswrapper[4865]: I0126 16:55:35.080101 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:35 crc kubenswrapper[4865]: I0126 16:55:35.080111 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:35 crc kubenswrapper[4865]: I0126 16:55:35.080127 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:35 crc kubenswrapper[4865]: I0126 16:55:35.080140 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:35Z","lastTransitionTime":"2026-01-26T16:55:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:35 crc kubenswrapper[4865]: I0126 16:55:35.182848 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:35 crc kubenswrapper[4865]: I0126 16:55:35.182911 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:35 crc kubenswrapper[4865]: I0126 16:55:35.182924 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:35 crc kubenswrapper[4865]: I0126 16:55:35.182946 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:35 crc kubenswrapper[4865]: I0126 16:55:35.182961 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:35Z","lastTransitionTime":"2026-01-26T16:55:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:35 crc kubenswrapper[4865]: I0126 16:55:35.285901 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:35 crc kubenswrapper[4865]: I0126 16:55:35.285946 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:35 crc kubenswrapper[4865]: I0126 16:55:35.285956 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:35 crc kubenswrapper[4865]: I0126 16:55:35.285974 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:35 crc kubenswrapper[4865]: I0126 16:55:35.285992 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:35Z","lastTransitionTime":"2026-01-26T16:55:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:35 crc kubenswrapper[4865]: I0126 16:55:35.389449 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:35 crc kubenswrapper[4865]: I0126 16:55:35.389493 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:35 crc kubenswrapper[4865]: I0126 16:55:35.389506 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:35 crc kubenswrapper[4865]: I0126 16:55:35.389524 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:35 crc kubenswrapper[4865]: I0126 16:55:35.389535 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:35Z","lastTransitionTime":"2026-01-26T16:55:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 26 16:55:35 crc kubenswrapper[4865]: I0126 16:55:35.415962 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-01 20:10:55.063967606 +0000 UTC
Jan 26 16:55:35 crc kubenswrapper[4865]: I0126 16:55:35.492207 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:35 crc kubenswrapper[4865]: I0126 16:55:35.492256 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:35 crc kubenswrapper[4865]: I0126 16:55:35.492267 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:35 crc kubenswrapper[4865]: I0126 16:55:35.492286 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:35 crc kubenswrapper[4865]: I0126 16:55:35.492299 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:35Z","lastTransitionTime":"2026-01-26T16:55:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:35 crc kubenswrapper[4865]: I0126 16:55:35.595769 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:35 crc kubenswrapper[4865]: I0126 16:55:35.595829 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:35 crc kubenswrapper[4865]: I0126 16:55:35.595852 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:35 crc kubenswrapper[4865]: I0126 16:55:35.595881 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:35 crc kubenswrapper[4865]: I0126 16:55:35.595904 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:35Z","lastTransitionTime":"2026-01-26T16:55:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:35 crc kubenswrapper[4865]: I0126 16:55:35.699761 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:35 crc kubenswrapper[4865]: I0126 16:55:35.700055 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:35 crc kubenswrapper[4865]: I0126 16:55:35.700073 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:35 crc kubenswrapper[4865]: I0126 16:55:35.700097 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:35 crc kubenswrapper[4865]: I0126 16:55:35.700116 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:35Z","lastTransitionTime":"2026-01-26T16:55:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:35 crc kubenswrapper[4865]: I0126 16:55:35.802892 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:35 crc kubenswrapper[4865]: I0126 16:55:35.802957 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:35 crc kubenswrapper[4865]: I0126 16:55:35.802975 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:35 crc kubenswrapper[4865]: I0126 16:55:35.803036 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:35 crc kubenswrapper[4865]: I0126 16:55:35.803095 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:35Z","lastTransitionTime":"2026-01-26T16:55:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:35 crc kubenswrapper[4865]: I0126 16:55:35.906684 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:35 crc kubenswrapper[4865]: I0126 16:55:35.906749 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:35 crc kubenswrapper[4865]: I0126 16:55:35.906771 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:35 crc kubenswrapper[4865]: I0126 16:55:35.906797 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:35 crc kubenswrapper[4865]: I0126 16:55:35.906814 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:35Z","lastTransitionTime":"2026-01-26T16:55:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.009713 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.009779 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.009795 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.009813 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.009828 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:36Z","lastTransitionTime":"2026-01-26T16:55:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.113057 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.113104 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.113115 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.113131 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.113140 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:36Z","lastTransitionTime":"2026-01-26T16:55:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.215773 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.215825 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.215833 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.215847 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.215857 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:36Z","lastTransitionTime":"2026-01-26T16:55:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.318144 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.318216 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.318229 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.318249 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.318264 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:36Z","lastTransitionTime":"2026-01-26T16:55:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.357666 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.357714 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp"
Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.357683 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.357811 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 26 16:55:36 crc kubenswrapper[4865]: E0126 16:55:36.357912 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 26 16:55:36 crc kubenswrapper[4865]: E0126 16:55:36.358272 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 26 16:55:36 crc kubenswrapper[4865]: E0126 16:55:36.358416 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9"
Jan 26 16:55:36 crc kubenswrapper[4865]: E0126 16:55:36.358544 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.416510 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-10 20:15:22.697224639 +0000 UTC
Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.420541 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.420596 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.420679 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.420713 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.420737 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:36Z","lastTransitionTime":"2026-01-26T16:55:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.523596 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.523683 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.523708 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.523751 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.523816 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:36Z","lastTransitionTime":"2026-01-26T16:55:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.626533 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.626631 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.626650 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.626720 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.626739 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:36Z","lastTransitionTime":"2026-01-26T16:55:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.730540 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.730579 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.730592 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.730613 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.730624 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:36Z","lastTransitionTime":"2026-01-26T16:55:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.833806 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.833873 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.833883 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.833898 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.833907 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:36Z","lastTransitionTime":"2026-01-26T16:55:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.937467 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.937508 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.937520 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.937536 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:36 crc kubenswrapper[4865]: I0126 16:55:36.937547 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:36Z","lastTransitionTime":"2026-01-26T16:55:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:37 crc kubenswrapper[4865]: I0126 16:55:37.040465 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:37 crc kubenswrapper[4865]: I0126 16:55:37.040510 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:37 crc kubenswrapper[4865]: I0126 16:55:37.040521 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:37 crc kubenswrapper[4865]: I0126 16:55:37.040543 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:37 crc kubenswrapper[4865]: I0126 16:55:37.040553 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:37Z","lastTransitionTime":"2026-01-26T16:55:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:37 crc kubenswrapper[4865]: I0126 16:55:37.143181 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:37 crc kubenswrapper[4865]: I0126 16:55:37.143228 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:37 crc kubenswrapper[4865]: I0126 16:55:37.143238 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:37 crc kubenswrapper[4865]: I0126 16:55:37.143253 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:37 crc kubenswrapper[4865]: I0126 16:55:37.143262 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:37Z","lastTransitionTime":"2026-01-26T16:55:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:37 crc kubenswrapper[4865]: I0126 16:55:37.246021 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:37 crc kubenswrapper[4865]: I0126 16:55:37.246064 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:37 crc kubenswrapper[4865]: I0126 16:55:37.246073 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:37 crc kubenswrapper[4865]: I0126 16:55:37.246087 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:37 crc kubenswrapper[4865]: I0126 16:55:37.246098 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:37Z","lastTransitionTime":"2026-01-26T16:55:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:37 crc kubenswrapper[4865]: I0126 16:55:37.384549 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:37 crc kubenswrapper[4865]: I0126 16:55:37.384640 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:37 crc kubenswrapper[4865]: I0126 16:55:37.384662 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:37 crc kubenswrapper[4865]: I0126 16:55:37.384717 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:37 crc kubenswrapper[4865]: I0126 16:55:37.384731 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:37Z","lastTransitionTime":"2026-01-26T16:55:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 26 16:55:37 crc kubenswrapper[4865]: I0126 16:55:37.417025 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 02:47:08.858597007 +0000 UTC
Jan 26 16:55:37 crc kubenswrapper[4865]: I0126 16:55:37.487662 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:37 crc kubenswrapper[4865]: I0126 16:55:37.487724 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:37 crc kubenswrapper[4865]: I0126 16:55:37.487797 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:37 crc kubenswrapper[4865]: I0126 16:55:37.487824 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:37 crc kubenswrapper[4865]: I0126 16:55:37.487841 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:37Z","lastTransitionTime":"2026-01-26T16:55:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:37 crc kubenswrapper[4865]: I0126 16:55:37.590711 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:37 crc kubenswrapper[4865]: I0126 16:55:37.590745 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:37 crc kubenswrapper[4865]: I0126 16:55:37.590753 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:37 crc kubenswrapper[4865]: I0126 16:55:37.590767 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:37 crc kubenswrapper[4865]: I0126 16:55:37.590778 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:37Z","lastTransitionTime":"2026-01-26T16:55:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:37 crc kubenswrapper[4865]: I0126 16:55:37.693561 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:37 crc kubenswrapper[4865]: I0126 16:55:37.694164 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:37 crc kubenswrapper[4865]: I0126 16:55:37.694178 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:37 crc kubenswrapper[4865]: I0126 16:55:37.694200 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:37 crc kubenswrapper[4865]: I0126 16:55:37.694212 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:37Z","lastTransitionTime":"2026-01-26T16:55:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:37 crc kubenswrapper[4865]: I0126 16:55:37.801864 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:37 crc kubenswrapper[4865]: I0126 16:55:37.801911 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:37 crc kubenswrapper[4865]: I0126 16:55:37.801921 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:37 crc kubenswrapper[4865]: I0126 16:55:37.801938 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:37 crc kubenswrapper[4865]: I0126 16:55:37.801948 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:37Z","lastTransitionTime":"2026-01-26T16:55:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:37 crc kubenswrapper[4865]: I0126 16:55:37.905118 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:37 crc kubenswrapper[4865]: I0126 16:55:37.905172 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:37 crc kubenswrapper[4865]: I0126 16:55:37.905181 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:37 crc kubenswrapper[4865]: I0126 16:55:37.905196 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:37 crc kubenswrapper[4865]: I0126 16:55:37.905205 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:37Z","lastTransitionTime":"2026-01-26T16:55:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.009906 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.009957 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.009967 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.009983 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.010012 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:38Z","lastTransitionTime":"2026-01-26T16:55:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.112592 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.112630 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.112639 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.112653 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.112664 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:38Z","lastTransitionTime":"2026-01-26T16:55:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.216978 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.217046 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.217061 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.217084 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.217098 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:38Z","lastTransitionTime":"2026-01-26T16:55:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.320308 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.320376 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.320401 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.320425 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.320442 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:38Z","lastTransitionTime":"2026-01-26T16:55:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.358360 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:55:38 crc kubenswrapper[4865]: E0126 16:55:38.358543 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.358804 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:55:38 crc kubenswrapper[4865]: E0126 16:55:38.358876 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.360021 4865 scope.go:117] "RemoveContainer" containerID="22f8a884a773f009ea5c522c8ecb36c28559352f219bc6d99d8351571fd007cb" Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.360358 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:55:38 crc kubenswrapper[4865]: E0126 16:55:38.360428 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9" Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.360508 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:55:38 crc kubenswrapper[4865]: E0126 16:55:38.360568 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.417889 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-24 08:04:10.336786925 +0000 UTC Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.423117 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.423174 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.423192 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.423214 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.423225 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:38Z","lastTransitionTime":"2026-01-26T16:55:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.526898 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.526940 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.526951 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.526967 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.526978 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:38Z","lastTransitionTime":"2026-01-26T16:55:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.629819 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.629872 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.629888 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.629910 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.629923 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:38Z","lastTransitionTime":"2026-01-26T16:55:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.732857 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.732900 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.732911 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.732927 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.732937 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:38Z","lastTransitionTime":"2026-01-26T16:55:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.835962 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.836018 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.836033 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.836051 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.836064 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:38Z","lastTransitionTime":"2026-01-26T16:55:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.949645 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.949698 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.949715 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.949740 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:38 crc kubenswrapper[4865]: I0126 16:55:38.949757 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:38Z","lastTransitionTime":"2026-01-26T16:55:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.052901 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.052953 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.052966 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.052987 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.053026 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:39Z","lastTransitionTime":"2026-01-26T16:55:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.156839 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.156911 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.156924 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.156943 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.156957 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:39Z","lastTransitionTime":"2026-01-26T16:55:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.260062 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.260118 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.260131 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.260153 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.260169 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:39Z","lastTransitionTime":"2026-01-26T16:55:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.363033 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.363092 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.363106 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.363128 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.363141 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:39Z","lastTransitionTime":"2026-01-26T16:55:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.392710 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-44x2q_0c0135f6-4074-4aab-9413-a8eb948cd566/ovnkube-controller/1.log" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.396473 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" event={"ID":"0c0135f6-4074-4aab-9413-a8eb948cd566","Type":"ContainerStarted","Data":"ee511e125da491001506ac0da29a1d7261d33f10c886fa265a81698ce5d2d8e4"} Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.397053 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.435092 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-14 09:46:21.098743833 +0000 UTC Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.435336 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e58c71a-f1f1-421b-8e21-53ef9b51b937\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c1c8845e13a8d4b226f13897c388c5e07b2e9442c182930b42a88b2ed7b299b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[
{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a953b0ed4474ae87ed2f4658deb39951e319c1675872e49ea28b634d3c37b8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1160d459f3a91359793f3431bd62257ad71f9446c935f43b5265443946d3e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:39Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.450775 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p7qpc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"000aa434-a8c8-4051-88f4-c50d48ce851b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4d967fccbef000c343246b0f7d1cbdd98592d6dc499d444472c20588a6e6697\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m7gvt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p7qpc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:39Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.454762 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.454802 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.454815 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.454830 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.454843 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:39Z","lastTransitionTime":"2026-01-26T16:55:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.467699 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bccdd94a-94b4-4a16-95dd-c375a34f754f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ccd48f832c010ac33cb2001b2e71aac437d4366744c03a232ff9d8dd4acea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9a6673
ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/e
ntrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fsw2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:39Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:39 crc kubenswrapper[4865]: E0126 16:55:39.468227 4865 
kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/red
hat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987
117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba
717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ac1a731-046d-4d8f-8161-6e9e491f5dac\\\",\\\"systemUUID\\\":\\\"35d9e2a2-68c2-48cb-856f-00ba3eb74617\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:39Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.473049 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.473102 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.473116 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.473143 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.473157 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:39Z","lastTransitionTime":"2026-01-26T16:55:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.486067 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:39Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:39 crc kubenswrapper[4865]: E0126 16:55:39.491223 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ac1a731-046d-4d8f-8161-6e9e491f5dac\\\",\\\"systemUUID\\\":\\\"3
5d9e2a2-68c2-48cb-856f-00ba3eb74617\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:39Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.495949 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.496043 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.496059 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.496094 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.496112 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:39Z","lastTransitionTime":"2026-01-26T16:55:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.503366 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-45gx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f950c5c3-b446-44cf-9de5-dd7ff03b615f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10c92950abd6cddc41b262bd7e06b74fa47a01c012edd0bec0f5260b992ee70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"
name\\\":\\\"kube-api-access-lwt2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-45gx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:39Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:39 crc kubenswrapper[4865]: E0126 16:55:39.509350 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ac1a731-046d-4d8f-8161-6e9e491f5dac\\\",\\\"systemUUID\\\":\\\"35d9e2a2-68c2-48cb-856f-00ba3eb74617\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:39Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.521179 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.521224 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.521236 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.521257 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.521272 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:39Z","lastTransitionTime":"2026-01-26T16:55:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.525913 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z64kc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ddaae5-b604-4352-b756-3a4ee374b6e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b554efe88a77e7cf03fc70023bc8663bc926af4a8a95d657d5073fcc44c9f17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4131a32bf9f6e8aa00c8250af3d01f8768e1db9c93b7a25b925580f0f337c2c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"
running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z64kc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:39Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:39 crc kubenswrapper[4865]: E0126 16:55:39.540718 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ac1a731-046d-4d8f-8161-6e9e491f5dac\\\",\\\"systemUUID\\\":\\\"35d9e2a2-68c2-48cb-856f-00ba3eb74617\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:39Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.546525 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.546571 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.546586 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.546607 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.546622 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:39Z","lastTransitionTime":"2026-01-26T16:55:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.564254 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bz29j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82ddf2354d7df028aad27125fddbfccd87910a5407d7d63ff39865dd9cedcaa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubel
et\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-79bqr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bz29j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:39Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:39 crc kubenswrapper[4865]: E0126 16:55:39.581356 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ac1a731-046d-4d8f-8161-6e9e491f5dac\\\",\\\"systemUUID\\\":\\\"35d9e2a2-68c2-48cb-856f-00ba3eb74617\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:39Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:39 crc kubenswrapper[4865]: E0126 16:55:39.581484 4865 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.584013 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.584053 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.584063 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.584086 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.584100 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:39Z","lastTransitionTime":"2026-01-26T16:55:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.601407 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-wx7wp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ebe888-610a-47c4-b256-3ddbf03f83b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:18Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-wx7wp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:39Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.617294 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83ffee97-9e36-462b-b020-39fcf1c33c00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00669bc8bfcb8d7fa5d244f0819a79880bc778ed6aab7b68787c72b6f1709443\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98604715e78c851f959f0c229d5e36f68203dd7d1af5522d2c76c17ec168f95b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ced4687ba65f43cdb20daccd718c699d6a86a64138439b5f8fbe6bbc37b1ba3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2ec0d2b95278257555fc63611b54fc6c16f5abbb660c6dcd07c256495f67cf\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d2ec0d2b95278257555fc63611b54fc6c16f5abbb660c6dcd07c256495f67cf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:39Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.633103 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:39Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.649730 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b2cd44e037e42e82e40d3b3a2bc4e00c18804301aa567cf5bf5c7c25ef536be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:39Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.662897 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ddedab5-2528-4881-9251-9ba5334aea61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9a5f5b650bd1b1e2875d8767634d7ac74fd1c90ea76f84e65f5e5078828be62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79930d9ec5ae2071a02e2417508458bd2f1b5693205ae0ad3ee46c5a3a31493b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q8cb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:39Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.678403 4865 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:39Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.686266 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.686301 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.686319 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.686340 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.686353 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:39Z","lastTransitionTime":"2026-01-26T16:55:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.696812 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c0135f6-4074-4aab-9413-a8eb948cd566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disable
d\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\
",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee511e125da491001506ac0da29a1d7261d33f10c886fa265a81698ce5d2d8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22f8a884a773f009ea5c522c8ecb36c28559352f219bc6d99d8351571fd007cb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T16:55:24Z\\\",\\\"message\\\":\\\"Informer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:24Z is after 2025-08-24T17:21:41Z]\\\\nI0126 16:55:24.318522 6347 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cluster\\\\\\\", UUID:\\\\\\\"7715118b-bb1b-400a-803e-7ab2cc3eeec0\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress-canary/ingress-canary\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: 
[]services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cl\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:23Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\
\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-44x2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:39Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.713746 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5610d76387528c6d73e03566f9acf4b744b79f4a39119d35d23b8b2dcb385f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:39Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.729976 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44c06740a3b924e4b54afda160e7e790e2bec67ca8852b703887591802f029af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:39Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.744310 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:39Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.788870 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.788940 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.788957 4865 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.789017 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.789040 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:39Z","lastTransitionTime":"2026-01-26T16:55:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.891912 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.891970 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.891982 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.892024 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.892040 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:39Z","lastTransitionTime":"2026-01-26T16:55:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.995983 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.996057 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.996066 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.996084 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:39 crc kubenswrapper[4865]: I0126 16:55:39.996096 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:39Z","lastTransitionTime":"2026-01-26T16:55:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.098659 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.098706 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.098716 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.098735 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.098748 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:40Z","lastTransitionTime":"2026-01-26T16:55:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.202512 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.202570 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.202586 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.202607 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.202621 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:40Z","lastTransitionTime":"2026-01-26T16:55:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.305116 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.305174 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.305186 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.305207 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.305220 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:40Z","lastTransitionTime":"2026-01-26T16:55:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.356896 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.356937 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.357103 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:55:40 crc kubenswrapper[4865]: E0126 16:55:40.357156 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9" Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.357287 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:55:40 crc kubenswrapper[4865]: E0126 16:55:40.357341 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:55:40 crc kubenswrapper[4865]: E0126 16:55:40.357474 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:55:40 crc kubenswrapper[4865]: E0126 16:55:40.357655 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
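[editor's note] The "Node became not ready" entries above carry the node's Ready condition as inline JSON, repeated on every status loop. When digging through a log like this it is quicker to decode that payload than to eyeball it; below is a minimal sketch assuming only the fields visible in the entries above (the struct mirrors a subset of Kubernetes' v1.NodeCondition; the helper names are illustrative, not kubelet code).

```go
package main

import (
	"encoding/json"
	"fmt"
	"strings"
	"time"
)

// nodeCondition mirrors the fields visible in the setters.go:603 entries
// above (a subset of the Kubernetes v1.NodeCondition type).
type nodeCondition struct {
	Type               string    `json:"type"`
	Status             string    `json:"status"`
	LastHeartbeatTime  time.Time `json:"lastHeartbeatTime"`
	LastTransitionTime time.Time `json:"lastTransitionTime"`
	Reason             string    `json:"reason"`
	Message            string    `json:"message"`
}

func main() {
	// The condition={...} payload from one of the log entries above.
	line := `condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:40Z","lastTransitionTime":"2026-01-26T16:55:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}`

	var c nodeCondition
	// Strip the "condition=" prefix, then decode the JSON object.
	if err := json.Unmarshal([]byte(line[strings.Index(line, "{"):]), &c); err != nil {
		panic(err)
	}
	fmt.Printf("%s=%s since %s (%s): %s\n",
		c.Type, c.Status, c.LastTransitionTime.Format(time.RFC3339), c.Reason, c.Message)
}
```

The decoded condition is exactly what the kubelet is trying to push to the API server here: Ready=False with reason KubeletNotReady, because no CNI configuration exists yet in /etc/kubernetes/cni/net.d/.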
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.404446 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-44x2q_0c0135f6-4074-4aab-9413-a8eb948cd566/ovnkube-controller/2.log" Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.405268 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-44x2q_0c0135f6-4074-4aab-9413-a8eb948cd566/ovnkube-controller/1.log" Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.406944 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.407007 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.407021 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.407037 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.407048 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:40Z","lastTransitionTime":"2026-01-26T16:55:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.408514 4865 generic.go:334] "Generic (PLEG): container finished" podID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerID="ee511e125da491001506ac0da29a1d7261d33f10c886fa265a81698ce5d2d8e4" exitCode=1 Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.408560 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" event={"ID":"0c0135f6-4074-4aab-9413-a8eb948cd566","Type":"ContainerDied","Data":"ee511e125da491001506ac0da29a1d7261d33f10c886fa265a81698ce5d2d8e4"} Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.408609 4865 scope.go:117] "RemoveContainer" containerID="22f8a884a773f009ea5c522c8ecb36c28559352f219bc6d99d8351571fd007cb" Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.409190 4865 scope.go:117] "RemoveContainer" containerID="ee511e125da491001506ac0da29a1d7261d33f10c886fa265a81698ce5d2d8e4" Jan 26 16:55:40 crc kubenswrapper[4865]: E0126 16:55:40.409367 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-44x2q_openshift-ovn-kubernetes(0c0135f6-4074-4aab-9413-a8eb948cd566)\"" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.425367 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:40Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.434981 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b2cd44e037e42e82e40d3b3a2bc4e00c18804301aa567cf5bf5c7c25ef536be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:40Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.435208 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-19 06:50:07.529468929 +0000 UTC Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.444210 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ddedab5-2528-4881-9251-9ba5334aea61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9a5f5b650bd1b1e2875d8767634d7ac74fd1c90ea76f84e65f5e5078828be62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79930d9ec5ae2071a02e2417508458bd2f1b5693205ae0ad3ee46c5a3a31493b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q8cb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:40Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.454661 4865 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-bz29j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82ddf2354d7df028aad27125fddbfccd87910a5407d7d63ff39865dd9cedcaa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-79bqr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-bz29j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:40Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.464715 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-wx7wp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ebe888-610a-47c4-b256-3ddbf03f83b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:18Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-wx7wp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2026-01-26T16:55:40Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.476210 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83ffee97-9e36-462b-b020-39fcf1c33c00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00669bc8bfcb8d7fa5d244f0819a79880bc778ed6aab7b68787c72b6f1709443\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98604715e78c851f959f0c229d5e36f68203dd7d1af5522d2c76c17ec168f95b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ced4687ba65f43cdb20daccd718c699d6a86a64138439b5f8fbe6bbc37b1ba3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod
-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2ec0d2b95278257555fc63611b54fc6c16f5abbb660c6dcd07c256495f67cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d2ec0d2b95278257555fc63611b54fc6c16f5abbb660c6dcd07c256495f67cf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:40Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.490327 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44c06740a3b924e4b54afda160e7e790e2bec67ca8852b703887591802f029af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:40Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.505641 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-26T16:55:40Z is after 2025-08-24T17:21:41Z"
Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.510073 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.510177 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.510194 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.510246 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.510265 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:40Z","lastTransitionTime":"2026-01-26T16:55:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
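[editor's note] Every status patch in this stretch fails for the same root cause: the network-node-identity webhook at 127.0.0.1:9743 presents a serving certificate whose NotAfter (2025-08-24T17:21:41Z) is months before the node's clock (2026-01-26T16:55:40Z), so Go's TLS client rejects the handshake during chain verification. In crypto/x509 the validity window comes from the parsed certificate's NotBefore/NotAfter fields, and the check itself reduces to two time comparisons. A minimal sketch with the times taken from the log (the NotBefore value is made up, since the log never shows it):

```go
package main

import (
	"fmt"
	"time"
)

// checkValidity reproduces the x509 validity-window test that fails in
// the webhook calls above; crypto/x509 performs the equivalent comparison
// against Certificate.NotBefore and Certificate.NotAfter during Verify.
func checkValidity(now, notBefore, notAfter time.Time) error {
	if now.Before(notBefore) {
		return fmt.Errorf("certificate is not yet valid: current time %s is before %s",
			now.Format(time.RFC3339), notBefore.Format(time.RFC3339))
	}
	if now.After(notAfter) {
		return fmt.Errorf("certificate has expired: current time %s is after %s",
			now.Format(time.RFC3339), notAfter.Format(time.RFC3339))
	}
	return nil
}

func main() {
	now, _ := time.Parse(time.RFC3339, "2026-01-26T16:55:40Z")      // node clock, from the log
	notAfter, _ := time.Parse(time.RFC3339, "2025-08-24T17:21:41Z") // webhook cert expiry, from the log
	notBefore := notAfter.AddDate(-1, 0, 0)                         // assumed issue time; not shown in the log

	fmt.Println(checkValidity(now, notBefore, notAfter))
	// certificate has expired: current time 2026-01-26T16:55:40Z is after 2025-08-24T17:21:41Z
}
```

This matches the "x509: certificate has expired or is not yet valid" wording repeated in every webhook failure above, and explains why pods whose containers are otherwise healthy (iptables-alerter, multus, the static kube-scheduler and kube-apiserver pods) all fail their status patches at once.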
Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.518150 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:40Z is after 2025-08-24T17:21:41Z"
Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.538356 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c0135f6-4074-4aab-9413-a8eb948cd566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee511e125da491001506ac0da29a1d7261d33f10
c886fa265a81698ce5d2d8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22f8a884a773f009ea5c522c8ecb36c28559352f219bc6d99d8351571fd007cb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T16:55:24Z\\\",\\\"message\\\":\\\"Informer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:24Z is after 2025-08-24T17:21:41Z]\\\\nI0126 16:55:24.318522 6347 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cluster\\\\\\\", UUID:\\\\\\\"7715118b-bb1b-400a-803e-7ab2cc3eeec0\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-ingress-canary/ingress-canary\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-ingress-canary/ingress-canary_TCP_cl\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:23Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee511e125da491001506ac0da29a1d7261d33f10c886fa265a81698ce5d2d8e4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"message\\\":\\\"l\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0126 16:55:39.437634 6638 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI0126 16:55:39.437644 6638 obj_retry.go:303] Retry object setup: *v1.Pod openshift-dns/node-resolver-p7qpc\\\\nF0126 16:55:39.437650 6638 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-01-26T16:55:39Z is after 2025-08-24T17:21:41Z]\\\\nI0126 16:55:39.437664 6638 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0126 16:55:39.437585 6638 services_controller.go:451] Built service openshift-marketplace/certified-operator\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnl
y\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-44x2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:40Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.551985 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5610d76387528c6d73e03566f9acf4b744b79f4a39119d35d23b8b2dcb385f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:40Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.565586 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e58c71a-f1f1-421b-8e21-53ef9b51b937\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c1c8845e13a8d4b226f13897c388c5e07b2e9442c182930b42a88b2ed7b299b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a953b0ed4474ae87ed2f4658deb39951e319c1675872e49ea28b634d3c37b8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1160d459f3a91359793f3431bd62257ad71f9446c935f43b5265443946d3e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:40Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.576326 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p7qpc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"000aa434-a8c8-4051-88f4-c50d48ce851b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4d967fccbef000c343246b0f7d1cbdd98592d6dc499d444472c20588a6e6697\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m7gvt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p7qpc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:40Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.590056 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bccdd94a-94b4-4a16-95dd-c375a34f754f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ccd48f832c010ac33cb2001b2e71aac437d4366744c03a232ff9d8dd4acea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mou
ntPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCou
nt\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fsw2\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:40Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.601179 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-45gx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f950c5c3-b446-44cf-9de5-dd7ff03b615f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10c92950abd6cddc41b262bd7e06b74fa47a01c012edd0bec0f5260b992ee70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwt2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-45gx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:40Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.613213 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z64kc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ddaae5-b604-4352-b756-3a4ee374b6e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b554efe88a77e7cf03fc70023bc8663bc926af4a8a95d657d5073fcc44c9f17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4131a32bf9f6e8aa00c8250af3d01f8768e1db9c93b7a25b925580f0f337c2c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z64kc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:40Z is after 2025-08-24T17:21:41Z" Jan 26 
16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.613523 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.613543 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.613551 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.613564 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.613628 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:40Z","lastTransitionTime":"2026-01-26T16:55:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.624365 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:40Z is after 2025-08-24T17:21:41Z"
Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.715976 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.716044 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.716055 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.716080 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.716097 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:40Z","lastTransitionTime":"2026-01-26T16:55:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.818674 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.818744 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.818761 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.818782 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.818794 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:40Z","lastTransitionTime":"2026-01-26T16:55:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.922038 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.922085 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.922096 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.922125 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:40 crc kubenswrapper[4865]: I0126 16:55:40.922138 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:40Z","lastTransitionTime":"2026-01-26T16:55:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.024853 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.024923 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.024942 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.024970 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.025014 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:41Z","lastTransitionTime":"2026-01-26T16:55:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.127741 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.127793 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.127802 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.127820 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.127829 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:41Z","lastTransitionTime":"2026-01-26T16:55:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.230662 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.230704 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.230715 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.230731 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.230741 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:41Z","lastTransitionTime":"2026-01-26T16:55:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.333454 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.333539 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.333563 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.333664 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.333695 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:41Z","lastTransitionTime":"2026-01-26T16:55:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.414871 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-44x2q_0c0135f6-4074-4aab-9413-a8eb948cd566/ovnkube-controller/2.log"
Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.419138 4865 scope.go:117] "RemoveContainer" containerID="ee511e125da491001506ac0da29a1d7261d33f10c886fa265a81698ce5d2d8e4"
Jan 26 16:55:41 crc kubenswrapper[4865]: E0126 16:55:41.419307 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-44x2q_openshift-ovn-kubernetes(0c0135f6-4074-4aab-9413-a8eb948cd566)\"" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566"
Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.434052 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e58c71a-f1f1-421b-8e21-53ef9b51b937\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c1c8845e13a8d4b226f13897c388c5e07b2e9442c182930b42a88b2ed7b299b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":
\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a953b0ed4474ae87ed2f4658deb39951e319c1675872e49ea28b634d3c37b8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1160d459f3a91359793f3431bd62257ad71f9446c935f43b5265443946d3e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:41Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.435318 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-07 07:16:17.251554286 +0000 UTC Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.436714 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.436769 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.436787 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.436819 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.436837 4865 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:41Z","lastTransitionTime":"2026-01-26T16:55:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.443836 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p7qpc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"000aa434-a8c8-4051-88f4-c50d48ce851b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4d967fccbef000c343246b0f7d1cbdd98592d6dc499d444472c20588a6e6697\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m7gvt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p7qpc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:41Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.461885 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bccdd94a-94b4-4a16-95dd-c375a34f754f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ccd48f832c010ac33cb2001b2e71aac437d4366744c03a232ff9d8dd4acea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fsw2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:41Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.474459 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:41Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.484911 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-45gx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f950c5c3-b446-44cf-9de5-dd7ff03b615f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10c92950abd6cddc41b262bd7e06b74fa47a01c012edd0bec0f5260b992ee70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwt2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-45gx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:41Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.495183 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z64kc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ddaae5-b604-4352-b756-3a4ee374b6e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b554efe88a77e7cf03fc70023bc8663bc926af4a8a95d657d5073fcc44c9f17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4131a32bf9f6e8aa00c8250af3d01f8768e1db9c93b7a25b925580f0f337c2c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z64kc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:41Z is after 2025-08-24T17:21:41Z" Jan 26 
16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.504783 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-wx7wp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ebe888-610a-47c4-b256-3ddbf03f83b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:18Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-wx7wp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:41Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.517331 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83ffee97-9e36-462b-b020-39fcf1c33c00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00669bc8bfcb8d7fa5d244f0819a79880bc778ed6aab7b68787c72b6f1709443\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98604715e78c851f959f0c229d5e36f68203dd7d1af5522d2c76c17ec168f95b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ced4687ba65f43cdb20daccd718c699d6a86a64138439b5f8fbe6bbc37b1ba3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2ec0d2b95278257555fc63611b54fc6c16f5abbb660c6dcd07c256495f67cf\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d2ec0d2b95278257555fc63611b54fc6c16f5abbb660c6dcd07c256495f67cf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:41Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.532586 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:41Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.540224 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.540269 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.540285 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.540306 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.540321 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:41Z","lastTransitionTime":"2026-01-26T16:55:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.551567 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b2cd44e037e42e82e40d3b3a2bc4e00c18804301aa567cf5bf5c7c25ef536be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:41Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.566681 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ddedab5-2528-4881-9251-9ba5334aea61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9a5f5b650bd1b1e2875d8767634d7ac74fd1c90ea76f84e65f5e5078828be62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79930d9ec5ae2071a02e2417508458bd2f1b5693205ae0ad3ee46c5a3a31493b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q8cb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:41Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.582074 4865 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-bz29j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82ddf2354d7df028aad27125fddbfccd87910a5407d7d63ff39865dd9cedcaa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-79bqr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-bz29j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:41Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.601014 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c0135f6-4074-4aab-9413-a8eb948cd566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cer
t\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":tr
ue,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee511e125da491001506ac0da29a1d7261d33f10c886fa265a81698ce5d2d8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee511e125da491001506ac0da29a1d7261d33f10c886fa265a81698ce5d2d8e4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"message\\\":\\\"l\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0126 16:55:39.437634 6638 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI0126 16:55:39.437644 6638 obj_retry.go:303] Retry object setup: *v1.Pod openshift-dns/node-resolver-p7qpc\\\\nF0126 16:55:39.437650 6638 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:39Z is after 2025-08-24T17:21:41Z]\\\\nI0126 16:55:39.437664 6638 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0126 16:55:39.437585 6638 services_controller.go:451] Built service 
openshift-marketplace/certified-operator\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-44x2q_openshift-ovn-kubernetes(0c0135f6-4074-4aab-9413-a8eb948cd566)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\
",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-44x2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:41Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.615618 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5610d76387528c6d73e03566f9acf4b744b79f4a39119d35d23b8b2dcb385f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:41Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.630891 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44c06740a3b924e4b54afda160e7e790e2bec67ca8852b703887591802f029af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:41Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.643808 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.643900 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.643923 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.643950 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.643973 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:41Z","lastTransitionTime":"2026-01-26T16:55:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.647499 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:41Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.748651 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 
16:55:41.748798 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.748811 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.748830 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.748849 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:41Z","lastTransitionTime":"2026-01-26T16:55:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.759942 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:41Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.852110 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.852155 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.852168 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.852189 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.852202 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:41Z","lastTransitionTime":"2026-01-26T16:55:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.955311 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.955378 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.955395 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.955416 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:41 crc kubenswrapper[4865]: I0126 16:55:41.955431 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:41Z","lastTransitionTime":"2026-01-26T16:55:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.058257 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.058318 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.058655 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.058799 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.058821 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:42Z","lastTransitionTime":"2026-01-26T16:55:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.162483 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.162544 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.162566 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.162613 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.162626 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:42Z","lastTransitionTime":"2026-01-26T16:55:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.266103 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.266199 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.266212 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.266261 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.266277 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:42Z","lastTransitionTime":"2026-01-26T16:55:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.357175 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.357175 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.357259 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.357366 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:55:42 crc kubenswrapper[4865]: E0126 16:55:42.357482 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:55:42 crc kubenswrapper[4865]: E0126 16:55:42.357602 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9" Jan 26 16:55:42 crc kubenswrapper[4865]: E0126 16:55:42.357669 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:55:42 crc kubenswrapper[4865]: E0126 16:55:42.357792 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.369028 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.369109 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.369129 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.369157 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.369174 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:42Z","lastTransitionTime":"2026-01-26T16:55:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.435735 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-09 00:14:08.862380963 +0000 UTC Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.472697 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.472744 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.472757 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.472774 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.472787 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:42Z","lastTransitionTime":"2026-01-26T16:55:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.577254 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.577317 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.577339 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.577368 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.577386 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:42Z","lastTransitionTime":"2026-01-26T16:55:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.682636 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.682706 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.682718 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.682738 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.682753 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:42Z","lastTransitionTime":"2026-01-26T16:55:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.785793 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.785868 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.785889 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.785918 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.785936 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:42Z","lastTransitionTime":"2026-01-26T16:55:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.889359 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.889412 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.889424 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.889446 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.889460 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:42Z","lastTransitionTime":"2026-01-26T16:55:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.992719 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.992780 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.992796 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.992828 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:42 crc kubenswrapper[4865]: I0126 16:55:42.992847 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:42Z","lastTransitionTime":"2026-01-26T16:55:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:43 crc kubenswrapper[4865]: I0126 16:55:43.096437 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:43 crc kubenswrapper[4865]: I0126 16:55:43.096488 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:43 crc kubenswrapper[4865]: I0126 16:55:43.096499 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:43 crc kubenswrapper[4865]: I0126 16:55:43.096517 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:43 crc kubenswrapper[4865]: I0126 16:55:43.096529 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:43Z","lastTransitionTime":"2026-01-26T16:55:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:43 crc kubenswrapper[4865]: I0126 16:55:43.200054 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:43 crc kubenswrapper[4865]: I0126 16:55:43.200102 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:43 crc kubenswrapper[4865]: I0126 16:55:43.200114 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:43 crc kubenswrapper[4865]: I0126 16:55:43.200133 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:43 crc kubenswrapper[4865]: I0126 16:55:43.200145 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:43Z","lastTransitionTime":"2026-01-26T16:55:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:43 crc kubenswrapper[4865]: I0126 16:55:43.303538 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:43 crc kubenswrapper[4865]: I0126 16:55:43.303614 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:43 crc kubenswrapper[4865]: I0126 16:55:43.303635 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:43 crc kubenswrapper[4865]: I0126 16:55:43.303655 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:43 crc kubenswrapper[4865]: I0126 16:55:43.303666 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:43Z","lastTransitionTime":"2026-01-26T16:55:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:43 crc kubenswrapper[4865]: I0126 16:55:43.406896 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:43 crc kubenswrapper[4865]: I0126 16:55:43.407027 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:43 crc kubenswrapper[4865]: I0126 16:55:43.407043 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:43 crc kubenswrapper[4865]: I0126 16:55:43.407074 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:43 crc kubenswrapper[4865]: I0126 16:55:43.407327 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:43Z","lastTransitionTime":"2026-01-26T16:55:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:43 crc kubenswrapper[4865]: I0126 16:55:43.436124 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-02 22:39:26.102596773 +0000 UTC Jan 26 16:55:43 crc kubenswrapper[4865]: I0126 16:55:43.510191 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:43 crc kubenswrapper[4865]: I0126 16:55:43.510249 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:43 crc kubenswrapper[4865]: I0126 16:55:43.510260 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:43 crc kubenswrapper[4865]: I0126 16:55:43.510282 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:43 crc kubenswrapper[4865]: I0126 16:55:43.510293 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:43Z","lastTransitionTime":"2026-01-26T16:55:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:43 crc kubenswrapper[4865]: I0126 16:55:43.613525 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:43 crc kubenswrapper[4865]: I0126 16:55:43.613587 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:43 crc kubenswrapper[4865]: I0126 16:55:43.613603 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:43 crc kubenswrapper[4865]: I0126 16:55:43.613629 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:43 crc kubenswrapper[4865]: I0126 16:55:43.613653 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:43Z","lastTransitionTime":"2026-01-26T16:55:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:43 crc kubenswrapper[4865]: I0126 16:55:43.716452 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:43 crc kubenswrapper[4865]: I0126 16:55:43.716517 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:43 crc kubenswrapper[4865]: I0126 16:55:43.716526 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:43 crc kubenswrapper[4865]: I0126 16:55:43.716544 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:43 crc kubenswrapper[4865]: I0126 16:55:43.716559 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:43Z","lastTransitionTime":"2026-01-26T16:55:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:43 crc kubenswrapper[4865]: I0126 16:55:43.819409 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:43 crc kubenswrapper[4865]: I0126 16:55:43.819473 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:43 crc kubenswrapper[4865]: I0126 16:55:43.819483 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:43 crc kubenswrapper[4865]: I0126 16:55:43.819499 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:43 crc kubenswrapper[4865]: I0126 16:55:43.819510 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:43Z","lastTransitionTime":"2026-01-26T16:55:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:43 crc kubenswrapper[4865]: I0126 16:55:43.921858 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:43 crc kubenswrapper[4865]: I0126 16:55:43.921918 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:43 crc kubenswrapper[4865]: I0126 16:55:43.921930 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:43 crc kubenswrapper[4865]: I0126 16:55:43.921948 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:43 crc kubenswrapper[4865]: I0126 16:55:43.921958 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:43Z","lastTransitionTime":"2026-01-26T16:55:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.025321 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.025365 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.025374 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.025391 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.025403 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:44Z","lastTransitionTime":"2026-01-26T16:55:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.128737 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.128797 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.128810 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.128827 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.128838 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:44Z","lastTransitionTime":"2026-01-26T16:55:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.232352 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.232421 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.232436 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.232455 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.232468 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:44Z","lastTransitionTime":"2026-01-26T16:55:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.335195 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.335269 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.335282 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.335306 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.335321 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:44Z","lastTransitionTime":"2026-01-26T16:55:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.357803 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.357904 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:55:44 crc kubenswrapper[4865]: E0126 16:55:44.357965 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.357985 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.357928 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:55:44 crc kubenswrapper[4865]: E0126 16:55:44.358396 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9" Jan 26 16:55:44 crc kubenswrapper[4865]: E0126 16:55:44.358349 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:55:44 crc kubenswrapper[4865]: E0126 16:55:44.358536 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.373141 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83ffee97-9e36-462b-b020-39fcf1c33c00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00669bc8bfcb8d7fa5d244f0819a79880bc778ed6aab7b68787c72b6f1709443\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98604715e78c851f959f0c229d5e36f68203dd7d1af5522d2c76c17ec168f95b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ced4687ba65f43cdb20daccd718c699d6a86a64138439b5f8fbe6bbc37b1ba3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2ec0d2b95278257555fc63611b54fc6c16f5abbb660c6dcd07c256495f67cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d2ec0d2b95278257555fc63611b54fc6c16f5abbb660c6dcd07c256495f67cf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:44Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.391745 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:44Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.407194 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b2cd44e037e42e82e40d3b3a2bc4e00c18804301aa567cf5bf5c7c25ef536be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:44Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.419732 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ddedab5-2528-4881-9251-9ba5334aea61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9a5f5b650bd1b1e2875d8767634d7ac74fd1c90ea76f84e65f5e5078828be62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79930d9ec5ae2071a02e2417508458bd2f1b5693205ae0ad3ee46c5a3a31493b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q8cb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:44Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.434314 4865 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-bz29j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82ddf2354d7df028aad27125fddbfccd87910a5407d7d63ff39865dd9cedcaa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-79bqr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-bz29j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:44Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.436486 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 13:05:52.063145409 +0000 UTC Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.439770 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.439808 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.439825 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.439843 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.439853 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:44Z","lastTransitionTime":"2026-01-26T16:55:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.448626 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-wx7wp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ebe888-610a-47c4-b256-3ddbf03f83b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:18Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-wx7wp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:44Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.467124 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5610d76387528c6d73e03566f9acf4b744b79f4a39119d35d23b8b2dcb385f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:44Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.484131 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44c06740a3b924e4b54afda160e7e790e2bec67ca8852b703887591802f029af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:44Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.499510 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:44Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.513685 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:44Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.534969 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c0135f6-4074-4aab-9413-a8eb948cd566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee511e125da491001506ac0da29a1d7261d33f10
c886fa265a81698ce5d2d8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee511e125da491001506ac0da29a1d7261d33f10c886fa265a81698ce5d2d8e4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"message\\\":\\\"l\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0126 16:55:39.437634 6638 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI0126 16:55:39.437644 6638 obj_retry.go:303] Retry object setup: *v1.Pod openshift-dns/node-resolver-p7qpc\\\\nF0126 16:55:39.437650 6638 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:39Z is after 2025-08-24T17:21:41Z]\\\\nI0126 16:55:39.437664 6638 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0126 16:55:39.437585 6638 services_controller.go:451] Built service openshift-marketplace/certified-operator\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-44x2q_openshift-ovn-kubernetes(0c0135f6-4074-4aab-9413-a8eb948cd566)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-44x2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:44Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.543157 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.543214 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.543226 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.543244 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.543256 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:44Z","lastTransitionTime":"2026-01-26T16:55:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.555164 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e58c71a-f1f1-421b-8e21-53ef9b51b937\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c1c8845e13a8d4b226f13897c388c5e07b2e9442c182930b42a88b2ed7b299b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a953b0ed4474ae87ed2f4658deb39951e319c1675872e49ea28b634d3c37b8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1160d459f3a91359793f3431bd62257ad71f9446c935f43b5265443946d3e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:44Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.570295 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p7qpc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"000aa434-a8c8-4051-88f4-c50d48ce851b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4d967fccbef000c343246b0f7d1cbdd98592d6dc499d444472c20588a6e6697\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m7gvt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p7qpc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:44Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.589783 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bccdd94a-94b4-4a16-95dd-c375a34f754f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ccd48f832c010ac33cb2001b2e71aac437d4366744c03a232ff9d8dd4acea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fsw2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:44Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.609851 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:44Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.623182 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-45gx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f950c5c3-b446-44cf-9de5-dd7ff03b615f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10c92950abd6cddc41b262bd7e06b74fa47a01c012edd0bec0f5260b992ee70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwt2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-45gx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:44Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.637497 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z64kc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ddaae5-b604-4352-b756-3a4ee374b6e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b554efe88a77e7cf03fc70023bc8663bc926af4a8a95d657d5073fcc44c9f17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4131a32bf9f6e8aa00c8250af3d01f8768e1db9c93b7a25b925580f0f337c2c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z64kc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:44Z is after 2025-08-24T17:21:41Z" Jan 26 
16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.647552 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.647614 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.647627 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.647649 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.647664 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:44Z","lastTransitionTime":"2026-01-26T16:55:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.750919 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.750967 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.750977 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.751017 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.751032 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:44Z","lastTransitionTime":"2026-01-26T16:55:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.854242 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.854280 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.854289 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.854330 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.854351 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:44Z","lastTransitionTime":"2026-01-26T16:55:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.956710 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.956753 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.956765 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.956782 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:44 crc kubenswrapper[4865]: I0126 16:55:44.956795 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:44Z","lastTransitionTime":"2026-01-26T16:55:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.059541 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.059591 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.059600 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.059656 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.059670 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:45Z","lastTransitionTime":"2026-01-26T16:55:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.163198 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.163265 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.163278 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.163381 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.163406 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:45Z","lastTransitionTime":"2026-01-26T16:55:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.265882 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.265937 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.265950 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.265971 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.265983 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:45Z","lastTransitionTime":"2026-01-26T16:55:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.369618 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.369672 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.369689 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.369708 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.369732 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:45Z","lastTransitionTime":"2026-01-26T16:55:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.438079 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-19 13:05:27.913837202 +0000 UTC Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.473118 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.473165 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.473178 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.473195 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.473206 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:45Z","lastTransitionTime":"2026-01-26T16:55:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.577627 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.577714 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.577727 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.577754 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.577768 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:45Z","lastTransitionTime":"2026-01-26T16:55:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.681701 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.681748 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.681763 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.681785 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.681799 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:45Z","lastTransitionTime":"2026-01-26T16:55:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.785939 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.786012 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.786026 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.786044 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.786056 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:45Z","lastTransitionTime":"2026-01-26T16:55:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.889832 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.889877 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.889891 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.889914 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.889932 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:45Z","lastTransitionTime":"2026-01-26T16:55:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.994126 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.994173 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.994185 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.994205 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:45 crc kubenswrapper[4865]: I0126 16:55:45.994220 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:45Z","lastTransitionTime":"2026-01-26T16:55:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.097427 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.097490 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.097503 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.097523 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.097536 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:46Z","lastTransitionTime":"2026-01-26T16:55:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.200902 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.200978 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.201006 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.201023 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.201037 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:46Z","lastTransitionTime":"2026-01-26T16:55:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.303558 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.303595 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.303604 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.303620 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.303633 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:46Z","lastTransitionTime":"2026-01-26T16:55:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.357116 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.357216 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.357286 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:55:46 crc kubenswrapper[4865]: E0126 16:55:46.357302 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9" Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.357450 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:55:46 crc kubenswrapper[4865]: E0126 16:55:46.357442 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:55:46 crc kubenswrapper[4865]: E0126 16:55:46.357534 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:55:46 crc kubenswrapper[4865]: E0126 16:55:46.357600 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.406956 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.407054 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.407067 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.407087 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.407099 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:46Z","lastTransitionTime":"2026-01-26T16:55:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.438289 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 09:38:33.204894614 +0000 UTC Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.510982 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.511068 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.511078 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.511099 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.511111 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:46Z","lastTransitionTime":"2026-01-26T16:55:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.615784 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.615863 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.615877 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.615904 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.615948 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:46Z","lastTransitionTime":"2026-01-26T16:55:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.719530 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.719578 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.719587 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.719622 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.719635 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:46Z","lastTransitionTime":"2026-01-26T16:55:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.823766 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.823822 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.823833 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.823854 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.823866 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:46Z","lastTransitionTime":"2026-01-26T16:55:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.927237 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.927297 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.927314 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.927339 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:46 crc kubenswrapper[4865]: I0126 16:55:46.927385 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:46Z","lastTransitionTime":"2026-01-26T16:55:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.030552 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.030619 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.030637 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.030668 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.030688 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:47Z","lastTransitionTime":"2026-01-26T16:55:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.134610 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.134728 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.134743 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.134762 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.134774 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:47Z","lastTransitionTime":"2026-01-26T16:55:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.238184 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.238258 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.238276 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.238300 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.238314 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:47Z","lastTransitionTime":"2026-01-26T16:55:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.341536 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.341591 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.341601 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.341617 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.341628 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:47Z","lastTransitionTime":"2026-01-26T16:55:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.438536 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-08 15:41:34.509499842 +0000 UTC Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.444541 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.444603 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.444613 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.444630 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.444642 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:47Z","lastTransitionTime":"2026-01-26T16:55:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.547316 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.547386 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.547399 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.547419 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.547434 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:47Z","lastTransitionTime":"2026-01-26T16:55:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.651962 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.652053 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.652072 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.652094 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.652116 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:47Z","lastTransitionTime":"2026-01-26T16:55:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.755109 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.755208 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.755244 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.755310 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.755336 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:47Z","lastTransitionTime":"2026-01-26T16:55:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.859144 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.859211 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.859225 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.859247 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.859300 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:47Z","lastTransitionTime":"2026-01-26T16:55:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.962636 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.962714 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.962738 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.962767 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:47 crc kubenswrapper[4865]: I0126 16:55:47.962802 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:47Z","lastTransitionTime":"2026-01-26T16:55:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.065814 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.065880 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.065894 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.065914 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.065928 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:48Z","lastTransitionTime":"2026-01-26T16:55:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.168686 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.168753 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.168773 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.168798 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.168816 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:48Z","lastTransitionTime":"2026-01-26T16:55:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.271515 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.271559 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.271571 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.271588 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.271605 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:48Z","lastTransitionTime":"2026-01-26T16:55:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.357790 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp"
Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.357793 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.357786 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.357823 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 26 16:55:48 crc kubenswrapper[4865]: E0126 16:55:48.358094 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9"
Jan 26 16:55:48 crc kubenswrapper[4865]: E0126 16:55:48.358342 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 26 16:55:48 crc kubenswrapper[4865]: E0126 16:55:48.358654 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 26 16:55:48 crc kubenswrapper[4865]: E0126 16:55:48.358429 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.374266 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.374344 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.374354 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.374375 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.374388 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:48Z","lastTransitionTime":"2026-01-26T16:55:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.439516 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-07 04:37:22.577824105 +0000 UTC
Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.477764 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.477850 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.477866 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.477887 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.477898 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:48Z","lastTransitionTime":"2026-01-26T16:55:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.580854 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.580898 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.580911 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.580929 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.580943 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:48Z","lastTransitionTime":"2026-01-26T16:55:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.683020 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.683068 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.683079 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.683096 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.683107 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:48Z","lastTransitionTime":"2026-01-26T16:55:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.786620 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.786680 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.786702 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.786720 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.786730 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:48Z","lastTransitionTime":"2026-01-26T16:55:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.889832 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.889920 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.889944 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.889977 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.890056 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:48Z","lastTransitionTime":"2026-01-26T16:55:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.992711 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.992756 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.992766 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.992779 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:48 crc kubenswrapper[4865]: I0126 16:55:48.992789 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:48Z","lastTransitionTime":"2026-01-26T16:55:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.096255 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.096397 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.096420 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.096443 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.096460 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:49Z","lastTransitionTime":"2026-01-26T16:55:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.200161 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.200223 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.200237 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.200256 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.200272 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:49Z","lastTransitionTime":"2026-01-26T16:55:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.302952 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.303055 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.303071 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.303096 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.303111 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:49Z","lastTransitionTime":"2026-01-26T16:55:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.405973 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.406054 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.406067 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.406087 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.406099 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:49Z","lastTransitionTime":"2026-01-26T16:55:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
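Interleaved with the heartbeats, certificate_manager.go:356 reports the kubelet-serving certificate's expiry (2026-02-24 05:53:03 UTC) together with a freshly computed rotation deadline; note that the deadline differs between the 16:55:48.439516 entry above (2025-11-07) and the 16:55:49.440625 entry below (2025-11-23). client-go jitters the deadline to a random point late in the certificate's lifetime so that a fleet of kubelets does not all rotate at the same instant; both deadlines lying in the past simply means rotation is already due. A sketch of that calculation, assuming a 70-90% jitter window and an issuance date of roughly 2025-02-24 (both inferred, not taken from this log):

import random
from datetime import datetime

def rotation_deadline(not_before: datetime, not_after: datetime) -> datetime:
    # Pick a deadline at a random point 70-90% of the way through the
    # certificate's validity window (assumed to mirror client-go's jitter).
    lifetime = not_after - not_before
    return not_before + lifetime * random.uniform(0.7, 0.9)

# Successive calls land on different days in late 2025, much like the
# two deadlines logged here:
print(rotation_deadline(datetime(2025, 2, 24), datetime(2026, 2, 24)))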
Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.440625 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 17:49:35.563698664 +0000 UTC
Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.508962 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.509070 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.509086 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.509109 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.509124 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:49Z","lastTransitionTime":"2026-01-26T16:55:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.612378 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.612444 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.612463 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.612487 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.612506 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:49Z","lastTransitionTime":"2026-01-26T16:55:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
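Shortly below, three "Error updating node status, will retry" entries appear (16:55:49.781318, .804952 and .829199). Each embeds the entire node-status patch that the kubelet tried to apply, and the payload is quoted twice: once as the value of err="..." and once as the patch string inside it, so every quote in the JSON shows up as \\\" in the raw line. All three attempts then fail at the same admission webhook, node.network-node-identity.openshift.io at https://127.0.0.1:9743/node, whose serving certificate expired on 2025-08-24T17:21:41Z, five months before these entries. A best-effort decoder for eyeballing the embedded patch (an assumed helper that leans on the exact quoting seen here, not a general parser):

import json
import re

def extract_patch(entry: str):
    # The patch sits between 'failed to patch status \"' and '\" for node'
    # in the raw line; two rounds of unescaping turn \\\" back into ".
    m = re.search(r'failed to patch status \\"(.*)\\" for node', entry)
    if m is None:
        return None
    once = m.group(1).encode().decode("unicode_escape")   # \\\" -> \"
    twice = once.encode().decode("unicode_escape")        # \"   -> "
    return json.loads(twice)

# extract_patch(line)["status"]["conditions"] then lists the same four
# conditions the heartbeats above keep recording.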
Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.715080 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.715125 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.715137 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.715153 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.715166 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:49Z","lastTransitionTime":"2026-01-26T16:55:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.767375 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.767530 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.767559 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.767583 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.767602 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:49Z","lastTransitionTime":"2026-01-26T16:55:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:49 crc kubenswrapper[4865]: E0126 16:55:49.781318 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ac1a731-046d-4d8f-8161-6e9e491f5dac\\\",\\\"systemUUID\\\":\\\"35d9e2a2-68c2-48cb-856f-00ba3eb74617\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:49Z is after 
2025-08-24T17:21:41Z" Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.786898 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.786973 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.787046 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.787082 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.787108 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:49Z","lastTransitionTime":"2026-01-26T16:55:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:49 crc kubenswrapper[4865]: E0126 16:55:49.804952 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ac1a731-046d-4d8f-8161-6e9e491f5dac\\\",\\\"systemUUID\\\":\\\"35d9e2a2-68c2-48cb-856f-00ba3eb74617\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:49Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.810399 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.810553 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.810585 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.810675 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.810762 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:49Z","lastTransitionTime":"2026-01-26T16:55:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:49 crc kubenswrapper[4865]: E0126 16:55:49.829199 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ac1a731-046d-4d8f-8161-6e9e491f5dac\\\",\\\"systemUUID\\\":\\\"35d9e2a2-68c2-48cb-856f-00ba3eb74617\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:49Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.836638 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.836750 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.836773 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.836842 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.836863 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:49Z","lastTransitionTime":"2026-01-26T16:55:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:49 crc kubenswrapper[4865]: E0126 16:55:49.854659 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ac1a731-046d-4d8f-8161-6e9e491f5dac\\\",\\\"systemUUID\\\":\\\"35d9e2a2-68c2-48cb-856f-00ba3eb74617\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:49Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.859847 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.859917 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
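
The patch body in these errors is a Kubernetes strategic merge patch: the "$setElementOrder/conditions" directive pins the ordering of the merge-keyed conditions list, and only changed fields (plus allocatable/capacity and the image inventory) are sent. A minimal sketch of producing such a patch, assuming the k8s.io/api and k8s.io/apimachinery modules; the condition literal is illustrative and this is the same helper family the node-status path builds on, not the kubelet's literal code:

    package main

    import (
        "encoding/json"
        "fmt"

        v1 "k8s.io/api/core/v1"
        "k8s.io/apimachinery/pkg/util/strategicpatch"
    )

    func main() {
        var oldNode, newNode v1.Node
        newNode.Status.Conditions = []v1.NodeCondition{{
            Type:   v1.NodeReady,
            Status: v1.ConditionFalse,
            Reason: "KubeletNotReady", // illustrative; mirrors the condition in the log
        }}

        oldJSON, _ := json.Marshal(oldNode)
        newJSON, _ := json.Marshal(newNode)

        // Emits a patch shaped like the one in the log, e.g.
        // {"status":{"$setElementOrder/conditions":[{"type":"Ready"}],"conditions":[...]}}
        patch, err := strategicpatch.CreateTwoWayMergePatch(oldJSON, newJSON, v1.Node{})
        if err != nil {
            panic(err)
        }
        fmt.Println(string(patch))
    }
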
event="NodeHasNoDiskPressure" Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.859931 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.859952 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.859983 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:49Z","lastTransitionTime":"2026-01-26T16:55:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:49 crc kubenswrapper[4865]: E0126 16:55:49.876612 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:55:49Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ac1a731-046d-4d8f-8161-6e9e491f5dac\\\",\\\"systemUUID\\\":\\\"35d9e2a2-68c2-48cb-856f-00ba3eb74617\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:49Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:49 crc kubenswrapper[4865]: E0126 16:55:49.876938 4865 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.879809 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
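
Every one of these attempts dies at the same spot: the node-identity admission webhook on 127.0.0.1:9743 presents a serving certificate that expired on 2025-08-24, roughly five months before the node's clock reading of 2026-01-26, so each status patch is rejected during the TLS handshake before it ever reaches the Node object. After a fixed number of tries (nodeStatusUpdateRetry, 5 in recent kubelet sources) the kubelet logs "exceeds retry count" and waits for the next sync. The rejection is the ordinary x509 validity-window check; a small sketch that pulls and tests the offending certificate, with the endpoint taken from the log and InsecureSkipVerify used only so the handshake survives long enough to read the peer certificate:

    package main

    import (
        "crypto/tls"
        "fmt"
        "time"
    )

    func main() {
        conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
        if err != nil {
            panic(err)
        }
        defer conn.Close()

        cert := conn.ConnectionState().PeerCertificates[0]
        fmt.Printf("NotBefore=%s NotAfter=%s\n", cert.NotBefore, cert.NotAfter)

        // The same window check that fails in the log:
        // "current time 2026-01-26T16:55:49Z is after 2025-08-24T17:21:41Z"
        if now := time.Now(); now.Before(cert.NotBefore) || now.After(cert.NotAfter) {
            fmt.Println("x509: certificate has expired or is not yet valid")
        }
    }
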
event="NodeHasSufficientMemory" Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.879884 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.879898 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.879915 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.879947 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:49Z","lastTransitionTime":"2026-01-26T16:55:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.983709 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.983758 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.983768 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.983813 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:49 crc kubenswrapper[4865]: I0126 16:55:49.983830 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:49Z","lastTransitionTime":"2026-01-26T16:55:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.088233 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.088288 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.088300 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.088344 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.088359 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:50Z","lastTransitionTime":"2026-01-26T16:55:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.191328 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.191399 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.191418 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.191444 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.191464 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:50Z","lastTransitionTime":"2026-01-26T16:55:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.294350 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.294421 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.294439 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.294464 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.294485 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:50Z","lastTransitionTime":"2026-01-26T16:55:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.357212 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 26 16:55:50 crc kubenswrapper[4865]: E0126 16:55:50.357351 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.357213 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp"
Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.357476 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.357529 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 26 16:55:50 crc kubenswrapper[4865]: E0126 16:55:50.357618 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9"
Jan 26 16:55:50 crc kubenswrapper[4865]: E0126 16:55:50.357702 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 26 16:55:50 crc kubenswrapper[4865]: E0126 16:55:50.357760 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.397741 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.397802 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.397822 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.397845 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.397863 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:50Z","lastTransitionTime":"2026-01-26T16:55:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
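
Every one of these pod failures traces back to the single message "no CNI configuration file in /etc/kubernetes/cni/net.d/": the container runtime reports NetworkReady=false until the network provider (OVN-Kubernetes on this cluster) writes at least one network config into that directory, and the kubelet refuses to start new pod sandboxes in the meantime. The readiness check amounts to a directory scan; a minimal sketch of that check, with patterns following the usual CNI loader conventions rather than the runtime's literal code:

    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    func main() {
        dir := "/etc/kubernetes/cni/net.d" // directory named in the log
        var confs []string
        for _, pat := range []string{"*.conf", "*.conflist", "*.json"} {
            matches, _ := filepath.Glob(filepath.Join(dir, pat))
            confs = append(confs, matches...)
        }
        if len(confs) == 0 {
            fmt.Println("NetworkReady=false: no CNI configuration file found")
            os.Exit(1)
        }
        fmt.Println("CNI configs present:", confs)
    }
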
Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.441412 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-12 20:26:47.049401633 +0000 UTC
Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.502125 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.502232 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.502263 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.502295 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.502502 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:50Z","lastTransitionTime":"2026-01-26T16:55:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.605814 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.605867 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.605877 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.605894 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.605905 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:50Z","lastTransitionTime":"2026-01-26T16:55:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
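
The certificate_manager line is unrelated to the expired webhook certificate: it concerns the kubelet's own serving certificate, which is still valid until 2026-02-24. Its rotation deadline is already in the past, which simply means rotation is due now; client-go places the deadline at a random 70-90% of the certificate's validity window and re-rolls the jitter on each pass, which is why a later entry (16:55:51.442679, below) prints a different deadline (2025-12-27) for the same expiry. A sketch of that computation, assuming a one-year certificate (the NotBefore is inferred, not in the log):

    package main

    import (
        "fmt"
        "math/rand"
        "time"
    )

    func main() {
        // NotAfter comes from the log; NotBefore is an assumption (one-year cert).
        notAfter := time.Date(2026, 2, 24, 5, 53, 3, 0, time.UTC)
        notBefore := notAfter.AddDate(-1, 0, 0)
        lifetime := notAfter.Sub(notBefore)

        // Deadline at a random 70-90% of the validity window, re-rolled each pass.
        for i := 0; i < 2; i++ {
            jitter := 0.7 + 0.2*rand.Float64()
            fmt.Println("rotation deadline:", notBefore.Add(time.Duration(jitter*float64(lifetime))))
        }
    }

Under that one-year assumption, both logged deadlines land inside the window (2025-11-12 is about 71.5% through it, 2025-12-27 about 83.8%), which is consistent with the jittered-deadline reading.
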
Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.709799 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.709855 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.709868 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.709889 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.709902 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:50Z","lastTransitionTime":"2026-01-26T16:55:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.812651 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.812711 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.812724 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.812745 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.812757 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:50Z","lastTransitionTime":"2026-01-26T16:55:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.868069 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/27ebe888-610a-47c4-b256-3ddbf03f83b9-metrics-certs\") pod \"network-metrics-daemon-wx7wp\" (UID: \"27ebe888-610a-47c4-b256-3ddbf03f83b9\") " pod="openshift-multus/network-metrics-daemon-wx7wp"
Jan 26 16:55:50 crc kubenswrapper[4865]: E0126 16:55:50.868528 4865 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 26 16:55:50 crc kubenswrapper[4865]: E0126 16:55:50.868710 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/27ebe888-610a-47c4-b256-3ddbf03f83b9-metrics-certs podName:27ebe888-610a-47c4-b256-3ddbf03f83b9 nodeName:}" failed. No retries permitted until 2026-01-26 16:56:22.868678174 +0000 UTC m=+110.452563761 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/27ebe888-610a-47c4-b256-3ddbf03f83b9-metrics-certs") pod "network-metrics-daemon-wx7wp" (UID: "27ebe888-610a-47c4-b256-3ddbf03f83b9") : object "openshift-multus"/"metrics-daemon-secret" not registered
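
The metrics-certs failure is a different symptom of the same stalled startup: "not registered" means the kubelet's secret manager has not yet registered the pod's secret sources, so the mount cannot be satisfied, and the volume manager backs off exponentially between attempts. The 32 s figure matches a doubling backoff from an initial 500 ms, capped at 2m2s as in the kubelet's nested pending operations, i.e. the seventh consecutive failure; a sketch of that schedule (constants mirror the defaults I believe the kubelet uses, stated here as an assumption):

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        const (
            initial  = 500 * time.Millisecond       // first retry delay
            maxDelay = 2*time.Minute + 2*time.Second // ceiling on the delay
        )
        d := initial
        for attempt := 1; attempt <= 9; attempt++ {
            fmt.Printf("failure %d: durationBeforeRetry %s\n", attempt, d)
            d *= 2
            if d > maxDelay {
                d = maxDelay
            }
        }
    }

Running it prints 500ms, 1s, 2s, 4s, 8s, 16s, 32s, ..., so a 32 s durationBeforeRetry corresponds to the seventh straight failure under these assumptions.
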
Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.916901 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.916944 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.916953 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.916969 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:50 crc kubenswrapper[4865]: I0126 16:55:50.916982 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:50Z","lastTransitionTime":"2026-01-26T16:55:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:51 crc kubenswrapper[4865]: I0126 16:55:51.020806 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:51 crc kubenswrapper[4865]: I0126 16:55:51.020859 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:51 crc kubenswrapper[4865]: I0126 16:55:51.020909 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:51 crc kubenswrapper[4865]: I0126 16:55:51.020931 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:51 crc kubenswrapper[4865]: I0126 16:55:51.020944 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:51Z","lastTransitionTime":"2026-01-26T16:55:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:51 crc kubenswrapper[4865]: I0126 16:55:51.124648 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:51 crc kubenswrapper[4865]: I0126 16:55:51.124714 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:51 crc kubenswrapper[4865]: I0126 16:55:51.124731 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:51 crc kubenswrapper[4865]: I0126 16:55:51.124757 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:51 crc kubenswrapper[4865]: I0126 16:55:51.124775 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:51Z","lastTransitionTime":"2026-01-26T16:55:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:51 crc kubenswrapper[4865]: I0126 16:55:51.227879 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:51 crc kubenswrapper[4865]: I0126 16:55:51.227937 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:51 crc kubenswrapper[4865]: I0126 16:55:51.227946 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:51 crc kubenswrapper[4865]: I0126 16:55:51.227965 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:51 crc kubenswrapper[4865]: I0126 16:55:51.227979 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:51Z","lastTransitionTime":"2026-01-26T16:55:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:51 crc kubenswrapper[4865]: I0126 16:55:51.331521 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:51 crc kubenswrapper[4865]: I0126 16:55:51.331572 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:51 crc kubenswrapper[4865]: I0126 16:55:51.331582 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:51 crc kubenswrapper[4865]: I0126 16:55:51.331598 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:51 crc kubenswrapper[4865]: I0126 16:55:51.331610 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:51Z","lastTransitionTime":"2026-01-26T16:55:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:51 crc kubenswrapper[4865]: I0126 16:55:51.436080 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:51 crc kubenswrapper[4865]: I0126 16:55:51.436235 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:51 crc kubenswrapper[4865]: I0126 16:55:51.436266 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:51 crc kubenswrapper[4865]: I0126 16:55:51.436308 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:51 crc kubenswrapper[4865]: I0126 16:55:51.436338 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:51Z","lastTransitionTime":"2026-01-26T16:55:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:51 crc kubenswrapper[4865]: I0126 16:55:51.442679 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-27 08:17:42.364533398 +0000 UTC
Jan 26 16:55:51 crc kubenswrapper[4865]: I0126 16:55:51.539432 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:51 crc kubenswrapper[4865]: I0126 16:55:51.539506 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:51 crc kubenswrapper[4865]: I0126 16:55:51.539519 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:51 crc kubenswrapper[4865]: I0126 16:55:51.539544 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:51 crc kubenswrapper[4865]: I0126 16:55:51.539559 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:51Z","lastTransitionTime":"2026-01-26T16:55:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:51 crc kubenswrapper[4865]: I0126 16:55:51.642807 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:51 crc kubenswrapper[4865]: I0126 16:55:51.642857 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:51 crc kubenswrapper[4865]: I0126 16:55:51.642867 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:51 crc kubenswrapper[4865]: I0126 16:55:51.642887 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:51 crc kubenswrapper[4865]: I0126 16:55:51.642899 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:51Z","lastTransitionTime":"2026-01-26T16:55:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:51 crc kubenswrapper[4865]: I0126 16:55:51.746594 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:51 crc kubenswrapper[4865]: I0126 16:55:51.746643 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:51 crc kubenswrapper[4865]: I0126 16:55:51.746653 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:51 crc kubenswrapper[4865]: I0126 16:55:51.746672 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:51 crc kubenswrapper[4865]: I0126 16:55:51.746682 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:51Z","lastTransitionTime":"2026-01-26T16:55:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:51 crc kubenswrapper[4865]: I0126 16:55:51.849348 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:51 crc kubenswrapper[4865]: I0126 16:55:51.849399 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:51 crc kubenswrapper[4865]: I0126 16:55:51.849410 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:51 crc kubenswrapper[4865]: I0126 16:55:51.849429 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:51 crc kubenswrapper[4865]: I0126 16:55:51.849444 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:51Z","lastTransitionTime":"2026-01-26T16:55:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:52 crc kubenswrapper[4865]: I0126 16:55:52.357396 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 26 16:55:52 crc kubenswrapper[4865]: I0126 16:55:52.357496 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 26 16:55:52 crc kubenswrapper[4865]: E0126 16:55:52.357589 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 26 16:55:52 crc kubenswrapper[4865]: I0126 16:55:52.357532 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp"
Jan 26 16:55:52 crc kubenswrapper[4865]: I0126 16:55:52.357496 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 26 16:55:52 crc kubenswrapper[4865]: E0126 16:55:52.357703 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 26 16:55:52 crc kubenswrapper[4865]: E0126 16:55:52.357768 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
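These "Error syncing pod, skipping" entries come from the pod workers of the four pods that need a sandbox IP from the CNI plugin; host-network pods (node-ca, multus, machine-config-daemon, which report Running later in this log) are unaffected. Since the same four entries recur every sync, a small illustrative helper (not part of the log) can deduplicate them into a list of blocked pods:

# Illustrative helper: extract pod and podUID from kubelet
# "Error syncing pod, skipping" entries, deduplicated by UID.
import re
import sys

PATTERN = re.compile(
    r'"Error syncing pod, skipping".*?pod="(?P<pod>[^"]+)" podUID="(?P<uid>[^"]+)"'
)

def blocked_pods(lines):
    seen = {}
    for line in lines:
        # finditer handles log files where several entries share one line
        for m in PATTERN.finditer(line):
            seen[m.group("uid")] = m.group("pod")
    return seen

if __name__ == "__main__":
    for uid, pod in blocked_pods(sys.stdin).items():
        print(f"{pod} ({uid})")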
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:55:52 crc kubenswrapper[4865]: E0126 16:55:52.357837 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9" Jan 26 16:55:52 crc kubenswrapper[4865]: I0126 16:55:52.365269 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:52 crc kubenswrapper[4865]: I0126 16:55:52.365306 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:52 crc kubenswrapper[4865]: I0126 16:55:52.365315 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:52 crc kubenswrapper[4865]: I0126 16:55:52.365331 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:52 crc kubenswrapper[4865]: I0126 16:55:52.365343 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:52Z","lastTransitionTime":"2026-01-26T16:55:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:52 crc kubenswrapper[4865]: I0126 16:55:52.443809 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-05 04:44:04.837336415 +0000 UTC Jan 26 16:55:52 crc kubenswrapper[4865]: I0126 16:55:52.467569 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:52 crc kubenswrapper[4865]: I0126 16:55:52.467616 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:52 crc kubenswrapper[4865]: I0126 16:55:52.467626 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:52 crc kubenswrapper[4865]: I0126 16:55:52.467643 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:52 crc kubenswrapper[4865]: I0126 16:55:52.467655 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:52Z","lastTransitionTime":"2026-01-26T16:55:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:52 crc kubenswrapper[4865]: I0126 16:55:52.571837 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:52 crc kubenswrapper[4865]: I0126 16:55:52.571897 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:52 crc kubenswrapper[4865]: I0126 16:55:52.571907 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:52 crc kubenswrapper[4865]: I0126 16:55:52.571927 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:52 crc kubenswrapper[4865]: I0126 16:55:52.571939 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:52Z","lastTransitionTime":"2026-01-26T16:55:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:52 crc kubenswrapper[4865]: I0126 16:55:52.675174 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:52 crc kubenswrapper[4865]: I0126 16:55:52.675264 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:52 crc kubenswrapper[4865]: I0126 16:55:52.675276 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:52 crc kubenswrapper[4865]: I0126 16:55:52.675301 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:52 crc kubenswrapper[4865]: I0126 16:55:52.675315 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:52Z","lastTransitionTime":"2026-01-26T16:55:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:52 crc kubenswrapper[4865]: I0126 16:55:52.778363 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:52 crc kubenswrapper[4865]: I0126 16:55:52.778419 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:52 crc kubenswrapper[4865]: I0126 16:55:52.778429 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:52 crc kubenswrapper[4865]: I0126 16:55:52.778449 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:52 crc kubenswrapper[4865]: I0126 16:55:52.778461 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:52Z","lastTransitionTime":"2026-01-26T16:55:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:52 crc kubenswrapper[4865]: I0126 16:55:52.881884 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:52 crc kubenswrapper[4865]: I0126 16:55:52.881969 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:52 crc kubenswrapper[4865]: I0126 16:55:52.881981 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:52 crc kubenswrapper[4865]: I0126 16:55:52.882018 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:52 crc kubenswrapper[4865]: I0126 16:55:52.882034 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:52Z","lastTransitionTime":"2026-01-26T16:55:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:52 crc kubenswrapper[4865]: I0126 16:55:52.985215 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:52 crc kubenswrapper[4865]: I0126 16:55:52.985279 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:52 crc kubenswrapper[4865]: I0126 16:55:52.985291 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:52 crc kubenswrapper[4865]: I0126 16:55:52.985312 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:52 crc kubenswrapper[4865]: I0126 16:55:52.985327 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:52Z","lastTransitionTime":"2026-01-26T16:55:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:53 crc kubenswrapper[4865]: I0126 16:55:53.088069 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:53 crc kubenswrapper[4865]: I0126 16:55:53.088135 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:53 crc kubenswrapper[4865]: I0126 16:55:53.088152 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:53 crc kubenswrapper[4865]: I0126 16:55:53.088181 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:53 crc kubenswrapper[4865]: I0126 16:55:53.088201 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:53Z","lastTransitionTime":"2026-01-26T16:55:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:53 crc kubenswrapper[4865]: I0126 16:55:53.191933 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:53 crc kubenswrapper[4865]: I0126 16:55:53.192014 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:53 crc kubenswrapper[4865]: I0126 16:55:53.192028 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:53 crc kubenswrapper[4865]: I0126 16:55:53.192050 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:53 crc kubenswrapper[4865]: I0126 16:55:53.192061 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:53Z","lastTransitionTime":"2026-01-26T16:55:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:53 crc kubenswrapper[4865]: I0126 16:55:53.296880 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:53 crc kubenswrapper[4865]: I0126 16:55:53.296970 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:53 crc kubenswrapper[4865]: I0126 16:55:53.297036 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:53 crc kubenswrapper[4865]: I0126 16:55:53.297083 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:53 crc kubenswrapper[4865]: I0126 16:55:53.297098 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:53Z","lastTransitionTime":"2026-01-26T16:55:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:53 crc kubenswrapper[4865]: I0126 16:55:53.401011 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:53 crc kubenswrapper[4865]: I0126 16:55:53.401071 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:53 crc kubenswrapper[4865]: I0126 16:55:53.401085 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:53 crc kubenswrapper[4865]: I0126 16:55:53.401105 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:53 crc kubenswrapper[4865]: I0126 16:55:53.401119 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:53Z","lastTransitionTime":"2026-01-26T16:55:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
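Each certificate_manager entry in this log reports the same expiration (2026-02-24 05:53:03 UTC) but a different rotation deadline, and every deadline already lies in the past, so the manager keeps re-evaluating on each pass. That pattern matches client-go's scheme of drawing the deadline at a random point 70-90% of the way through the certificate's lifetime; the sketch below mimics that scheme and is not the kubelet's actual code. The notBefore value is an assumption: a roughly one-year lifetime is what makes the logged deadlines (Nov-Dec 2025) plausible.

# Sketch of the jittered rotation deadline behaviour seen above.
import random
from datetime import datetime, timedelta

def rotation_deadline(not_before: datetime, not_after: datetime) -> datetime:
    lifetime = not_after - not_before
    # pick a point 70-90% of the way through the lifetime, as client-go does
    jitter = 0.7 + 0.2 * random.random()
    return not_before + timedelta(seconds=lifetime.total_seconds() * jitter)

if __name__ == "__main__":
    not_after = datetime(2026, 2, 24, 5, 53, 3)          # expiration from the log
    not_before = datetime(2025, 2, 24, 5, 53, 3)         # assumed one-year lifetime
    print(rotation_deadline(not_before, not_after))       # falls in late 2025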
Jan 26 16:55:53 crc kubenswrapper[4865]: I0126 16:55:53.444575 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-31 00:46:49.002392963 +0000 UTC
Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.357437 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.357768 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 26 16:55:54 crc kubenswrapper[4865]: E0126 16:55:54.357914 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.357966 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.357928 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp"
Jan 26 16:55:54 crc kubenswrapper[4865]: E0126 16:55:54.358158 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 26 16:55:54 crc kubenswrapper[4865]: E0126 16:55:54.358226 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 26 16:55:54 crc kubenswrapper[4865]: E0126 16:55:54.358270 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9"
Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.361572 4865 scope.go:117] "RemoveContainer" containerID="ee511e125da491001506ac0da29a1d7261d33f10c886fa265a81698ce5d2d8e4"
Jan 26 16:55:54 crc kubenswrapper[4865]: E0126 16:55:54.362086 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-44x2q_openshift-ovn-kubernetes(0c0135f6-4074-4aab-9413-a8eb948cd566)\"" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566"
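The entry above explains why the CNI config never appears: the ovnkube-controller container in ovnkube-node-44x2q is itself crash-looping. "back-off 20s" points at the kubelet's restart back-off, which by default starts at 10s, doubles per crash in the streak, and caps at 5 minutes; 20s therefore indicates the second delay. A small sketch of that default schedule (illustrative, not kubelet code):

# Sketch of the kubelet's default container restart back-off:
# 10s initial delay, doubled per crash, capped at 300s (5 minutes).
def backoff_delays(initial=10, cap=300, count=8):
    delay, out = initial, []
    for _ in range(count):
        out.append(delay)
        delay = min(delay * 2, cap)
    return out

if __name__ == "__main__":
    print(backoff_delays())  # [10, 20, 40, 80, 160, 300, 300, 300]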
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:55:54 crc kubenswrapper[4865]: E0126 16:55:54.358270 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9" Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.361572 4865 scope.go:117] "RemoveContainer" containerID="ee511e125da491001506ac0da29a1d7261d33f10c886fa265a81698ce5d2d8e4" Jan 26 16:55:54 crc kubenswrapper[4865]: E0126 16:55:54.362086 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-44x2q_openshift-ovn-kubernetes(0c0135f6-4074-4aab-9413-a8eb948cd566)\"" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.374069 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-45gx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f950c5c3-b446-44cf-9de5-dd7ff03b615f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10c92950abd6cddc41b262bd7e06b74fa47a01c012edd0bec0f5260b992ee70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwt2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:
55:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-45gx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:54Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.389928 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z64kc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ddaae5-b604-4352-b756-3a4ee374b6e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b554efe88a77e7cf03fc70023bc8663bc926af4a8a95d657d5073fcc44c9f17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4131a32bf9f6e8aa00c8250af3d01f8768e1db9c93b7a25b925580f0f337c2c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z64kc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:54Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.404135 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:54Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.420600 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:54Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.437488 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b2cd44e037e42e82e40d3b3a2bc4e00c18804301aa567cf5bf5c7c25ef536be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:54Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.442600 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.442640 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.442653 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.442670 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.442682 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:54Z","lastTransitionTime":"2026-01-26T16:55:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.446681 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-09 13:31:38.228557885 +0000 UTC Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.453880 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ddedab5-2528-4881-9251-9ba5334aea61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9a5f5b650bd1b1e2875d8767634d7ac74fd1c90ea76f84e65f5e5078828be62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79930d9ec5ae2071a02e2417508458bd2f1b5693205ae0ad3ee46c5a3a31493b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/o
cp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q8cb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:54Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.471559 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bz29j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82ddf2354d7df028aad27125fddbfccd87910a5407d7d63ff39865dd9cedcaa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"hos
t-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-79bqr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bz29j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:54Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.489330 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-wx7wp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ebe888-610a-47c4-b256-3ddbf03f83b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:18Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-wx7wp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:54Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.505494 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83ffee97-9e36-462b-b020-39fcf1c33c00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00669bc8bfcb8d7fa5d244f0819a79880bc778ed6aab7b68787c72b6f1709443\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98604715e78c851f959f0c229d5e36f68203dd7d1af5522d2c76c17ec168f95b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ced4687ba65f43cdb20daccd718c699d6a86a64138439b5f8fbe6bbc37b1ba3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2ec0d2b95278257555fc63611b54fc6c16f5abbb660c6dcd07c256495f67cf\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d2ec0d2b95278257555fc63611b54fc6c16f5abbb660c6dcd07c256495f67cf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:54Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.520840 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44c06740a3b924e4b54afda160e7e790e2bec67ca8852b703887591802f029af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:54Z is after 
2025-08-24T17:21:41Z" Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.536672 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:54Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.551105 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:54Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.552872 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.552898 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.552906 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.552922 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.552933 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:54Z","lastTransitionTime":"2026-01-26T16:55:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.575817 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c0135f6-4074-4aab-9413-a8eb948cd566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee511e125da491001506ac0da29a1d7261d33f10c886fa265a81698ce5d2d8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee511e125da491001506ac0da29a1d7261d33f10c886fa265a81698ce5d2d8e4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"message\\\":\\\"l\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0126 16:55:39.437634 6638 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI0126 16:55:39.437644 6638 obj_retry.go:303] Retry object setup: *v1.Pod openshift-dns/node-resolver-p7qpc\\\\nF0126 16:55:39.437650 6638 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:39Z is after 2025-08-24T17:21:41Z]\\\\nI0126 16:55:39.437664 6638 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0126 16:55:39.437585 6638 services_controller.go:451] Built service openshift-marketplace/certified-operator\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-44x2q_openshift-ovn-kubernetes(0c0135f6-4074-4aab-9413-a8eb948cd566)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-44x2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:54Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.593376 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5610d76387528c6d73e03566f9acf4b744b79f4a39119d35d23b8b2dcb385f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 
16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:54Z is after 2025-08-24T17:21:41Z" 
Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.609089 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e58c71a-f1f1-421b-8e21-53ef9b51b937\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c1c8845e13a8d4b226f13897c388c5e07b2e9442c182930b42a88b2ed7b299b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a953b0ed4474ae87ed2f4658deb39951e319c1675872e49ea28b634d3c37b8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-c
erts\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1160d459f3a91359793f3431bd62257ad71f9446c935f43b5265443946d3e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:54Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.621567 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p7qpc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"000aa434-a8c8-4051-88f4-c50d48ce851b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4d967fccbef000c343246b0f7d1cbdd98592d6dc499d444472c20588a6e6697\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m7gvt\\\",\\\"readOnly\\\":true
,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p7qpc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:54Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.639353 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bccdd94a-94b4-4a16-95dd-c375a34f754f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ccd48f832c010ac33cb2001b2e71aac437d4366744c03a232ff9d8dd4acea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"start
edAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\"
:\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.1
68.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fsw2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:54Z is after 2025-08-24T17:21:41Z"
Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.655276 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.655312 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.655323 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.655341 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.655352 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:54Z","lastTransitionTime":"2026-01-26T16:55:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.758809 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.758851 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.758860 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.758880 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.758892 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:54Z","lastTransitionTime":"2026-01-26T16:55:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.862435 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.862497 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.862510 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.862532 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.862546 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:54Z","lastTransitionTime":"2026-01-26T16:55:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.966190 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.966248 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.966260 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.966287 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:54 crc kubenswrapper[4865]: I0126 16:55:54.966304 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:54Z","lastTransitionTime":"2026-01-26T16:55:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.069377 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.069476 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.069530 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.069555 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.069634 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:55Z","lastTransitionTime":"2026-01-26T16:55:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.171906 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.171945 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.171956 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.171972 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.171983 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:55Z","lastTransitionTime":"2026-01-26T16:55:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.276359 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.276429 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.276445 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.276468 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.276483 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:55Z","lastTransitionTime":"2026-01-26T16:55:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.379116 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.379174 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.379190 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.379215 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.379233 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:55Z","lastTransitionTime":"2026-01-26T16:55:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.447757 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-04 22:26:22.696132459 +0000 UTC
Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.476713 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-bz29j_d5c89572-d108-4b35-ab46-dfbbc8b7e3be/kube-multus/0.log"
Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.476774 4865 generic.go:334] "Generic (PLEG): container finished" podID="d5c89572-d108-4b35-ab46-dfbbc8b7e3be" containerID="82ddf2354d7df028aad27125fddbfccd87910a5407d7d63ff39865dd9cedcaa7" exitCode=1
Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.476822 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-bz29j" event={"ID":"d5c89572-d108-4b35-ab46-dfbbc8b7e3be","Type":"ContainerDied","Data":"82ddf2354d7df028aad27125fddbfccd87910a5407d7d63ff39865dd9cedcaa7"}
Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.477341 4865 scope.go:117] "RemoveContainer" containerID="82ddf2354d7df028aad27125fddbfccd87910a5407d7d63ff39865dd9cedcaa7"
Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.481426 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.481468 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.481479 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.481498 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.481508 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:55Z","lastTransitionTime":"2026-01-26T16:55:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.499826 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e58c71a-f1f1-421b-8e21-53ef9b51b937\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c1c8845e13a8d4b226f13897c388c5e07b2e9442c182930b42a88b2ed7b299b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a953b0ed4474ae87ed2f4658deb39951e319c1675872e49ea28b634d3c37b8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1160d459f3a91359793f3431bd62257ad71f9446c935f43b5265443946d3e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:55Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.512695 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p7qpc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"000aa434-a8c8-4051-88f4-c50d48ce851b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4d967fccbef000c343246b0f7d1cbdd98592d6dc499d444472c20588a6e6697\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m7gvt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p7qpc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:55Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.532195 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bccdd94a-94b4-4a16-95dd-c375a34f754f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ccd48f832c010ac33cb2001b2e71aac437d4366744c03a232ff9d8dd4acea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fsw2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:55Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.544930 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-45gx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f950c5c3-b446-44cf-9de5-dd7ff03b615f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10c92950abd6cddc41b262bd7e06b74fa47a01c012edd0bec0f5260b992ee70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwt2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-45gx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:55Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.555735 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z64kc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ddaae5-b604-4352-b756-3a4ee374b6e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b554efe88a77e7cf03fc70023bc8663bc926af4a8a95d657d5073fcc44c9f17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4131a32bf9f6e8aa00c8250af3d01f8768e1db9c93b7a25b925580f0f337c2c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z64kc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:55Z is after 2025-08-24T17:21:41Z" Jan 26 
16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.569657 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:55Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.583726 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:55Z is after 2025-08-24T17:21:41Z"
Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.589152 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.589203 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.589214 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.589233 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.589257 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:55Z","lastTransitionTime":"2026-01-26T16:55:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.598272 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b2cd44e037e42e82e40d3b3a2bc4e00c18804301aa567cf5bf5c7c25ef536be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:55Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.612847 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ddedab5-2528-4881-9251-9ba5334aea61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9a5f5b650bd1b1e2875d8767634d7ac74fd1c90ea76f84e65f5e5078828be62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79930d9ec5ae2071a02e2417508458bd2f1b5693205ae0ad3ee46c5a3a31493b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q8cb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:55Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.627259 4865 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-bz29j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:55Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:55Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://82ddf2354d7df028aad27125fddbfccd87910a5407d7d63ff39865dd9cedcaa7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82ddf2354d7df028aad27125fddbfccd87910a5407d7d63ff39865dd9cedcaa7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T16:55:54Z\\\",\\\"message\\\":\\\"2026-01-26T16:55:08+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c36d9952-7cfc-4ebd-954c-df5db2c91d07\\\\n2026-01-26T16:55:08+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c36d9952-7cfc-4ebd-954c-df5db2c91d07 to /host/opt/cni/bin/\\\\n2026-01-26T16:55:09Z [verbose] multus-daemon started\\\\n2026-01-26T16:55:09Z [verbose] Readiness Indicator file check\\\\n2026-01-26T16:55:54Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-79bqr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bz29j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:55Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.644376 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-wx7wp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ebe888-610a-47c4-b256-3ddbf03f83b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:18Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-wx7wp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:55Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.659663 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83ffee97-9e36-462b-b020-39fcf1c33c00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00669bc8bfcb8d7fa5d244f0819a79880bc778ed6aab7b68787c72b6f1709443\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98604715e78c851f959f0c229d5e36f68203dd7d1af5522d2c76c17ec168f95b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ced4687ba65f43cdb20daccd718c699d6a86a64138439b5f8fbe6bbc37b1ba3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2ec0d2b95278257555fc63611b54fc6c16f5abbb660c6dcd07c256495f67cf\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d2ec0d2b95278257555fc63611b54fc6c16f5abbb660c6dcd07c256495f67cf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:55Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.673843 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44c06740a3b924e4b54afda160e7e790e2bec67ca8852b703887591802f029af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:55Z is after 
2025-08-24T17:21:41Z" Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.689888 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:55Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.692273 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.692305 
4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.692314 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.692333 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.692346 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:55Z","lastTransitionTime":"2026-01-26T16:55:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.704954 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:55Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.723817 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c0135f6-4074-4aab-9413-a8eb948cd566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee511e125da491001506ac0da29a1d7261d33f10
c886fa265a81698ce5d2d8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee511e125da491001506ac0da29a1d7261d33f10c886fa265a81698ce5d2d8e4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"message\\\":\\\"l\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0126 16:55:39.437634 6638 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI0126 16:55:39.437644 6638 obj_retry.go:303] Retry object setup: *v1.Pod openshift-dns/node-resolver-p7qpc\\\\nF0126 16:55:39.437650 6638 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:39Z is after 2025-08-24T17:21:41Z]\\\\nI0126 16:55:39.437664 6638 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0126 16:55:39.437585 6638 services_controller.go:451] Built service openshift-marketplace/certified-operator\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-44x2q_openshift-ovn-kubernetes(0c0135f6-4074-4aab-9413-a8eb948cd566)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-44x2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:55Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.737947 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5610d76387528c6d73e03566f9acf4b744b79f4a39119d35d23b8b2dcb385f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 
16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:55Z is after 2025-08-24T17:21:41Z" 
Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.795710 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.795746 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.795754 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.795770 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.795784 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:55Z","lastTransitionTime":"2026-01-26T16:55:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.900208 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.900266 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.900285 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.900313 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:55 crc kubenswrapper[4865]: I0126 16:55:55.900329 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:55Z","lastTransitionTime":"2026-01-26T16:55:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.003721 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.003783 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.003796 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.003815 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.003827 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:56Z","lastTransitionTime":"2026-01-26T16:55:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.106644 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.106765 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.106900 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.106937 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.106958 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:56Z","lastTransitionTime":"2026-01-26T16:55:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.210387 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.210464 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.210485 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.210514 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.210533 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:56Z","lastTransitionTime":"2026-01-26T16:55:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.313525 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.313594 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.313611 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.313634 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.313652 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:56Z","lastTransitionTime":"2026-01-26T16:55:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.357421 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.357459 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp"
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.357534 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 26 16:55:56 crc kubenswrapper[4865]: E0126 16:55:56.357625 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.357634 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 26 16:55:56 crc kubenswrapper[4865]: E0126 16:55:56.357805 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 26 16:55:56 crc kubenswrapper[4865]: E0126 16:55:56.357854 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 26 16:55:56 crc kubenswrapper[4865]: E0126 16:55:56.357929 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9"
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.416563 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.416613 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.416623 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.416645 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.416662 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:56Z","lastTransitionTime":"2026-01-26T16:55:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.448117 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 18:09:49.675393999 +0000 UTC
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.484229 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-bz29j_d5c89572-d108-4b35-ab46-dfbbc8b7e3be/kube-multus/0.log"
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.484308 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-bz29j" event={"ID":"d5c89572-d108-4b35-ab46-dfbbc8b7e3be","Type":"ContainerStarted","Data":"d80fdc9f189133778add9127ee70dbdd4ca23ed09cf047218ed4ce9e4b07ce36"}
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.504942 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bccdd94a-94b4-4a16-95dd-c375a34f754f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ccd48f832c010ac33cb2001b2e71aac437d4366744c03a232ff9d8dd4acea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fsw2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:56Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.519894 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.520706 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:56 crc 
kubenswrapper[4865]: I0126 16:55:56.520721 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.520748 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.520761 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:56Z","lastTransitionTime":"2026-01-26T16:55:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.524923 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e58c71a-f1f1-421b-8e21-53ef9b51b937\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c1c8845e13a8d4b226f13897c388c5e07b2e9442c182930b42a88b2ed7b299b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"con
tainerID\\\":\\\"cri-o://1a953b0ed4474ae87ed2f4658deb39951e319c1675872e49ea28b634d3c37b8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1160d459f3a91359793f3431bd62257ad71f9446c935f43b5265443946d3e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:56Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.541582 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p7qpc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"000aa434-a8c8-4051-88f4-c50d48ce851b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4d967fccbef000c343246b0f7d1cbdd98592d6dc499d444472c20588a6e6697\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m7gvt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p7qpc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:56Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.559911 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:56Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.572492 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-45gx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f950c5c3-b446-44cf-9de5-dd7ff03b615f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10c92950abd6cddc41b262bd7e06b74fa47a01c012edd0bec0f5260b992ee70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwt2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126
.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-45gx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:56Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.584725 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z64kc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ddaae5-b604-4352-b756-3a4ee374b6e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b554efe88a77e7cf03fc70023bc8663bc926af4a8a95d657d5073fcc44c9f17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4131a32bf9f6e8aa00c8250af3d01f8768e1db9c93b7a25b925580f0f337c2c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/va
r/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z64kc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:56Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.595968 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ddedab5-2528-4881-9251-9ba5334aea61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9a5f5b650bd1b1e2875d8767634d7ac74fd1c90ea76f84e65f5e5078828be62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79930d9ec5ae2071a02e2417508458bd2f1b5693205ae0ad3ee46c5a3a31493b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\
\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q8cb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:56Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.608151 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bz29j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d80fdc9f189133778add9127ee70dbdd4ca23ed09cf047218ed4ce9e4b07ce36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82ddf2354d7df028aad27125fddbfccd87910a5407d7d63ff39865dd9cedcaa7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T16:55:54Z\\\",\\\"message\\\":\\\"2026-01-26T16:55:08+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c36d9952-7cfc-4ebd-954c-df5db2c91d07\\\\n2026-01-26T16:55:08+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c36d9952-7cfc-4ebd-954c-df5db2c91d07 to /host/opt/cni/bin/\\\\n2026-01-26T16:55:09Z [verbose] multus-daemon started\\\\n2026-01-26T16:55:09Z [verbose] Readiness Indicator file check\\\\n2026-01-26T16:55:54Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-79bqr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bz29j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:56Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.618852 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-wx7wp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ebe888-610a-47c4-b256-3ddbf03f83b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with 
unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:18Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-wx7wp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:56Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.623427 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.623472 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.623482 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.623501 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.623513 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:56Z","lastTransitionTime":"2026-01-26T16:55:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.631884 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83ffee97-9e36-462b-b020-39fcf1c33c00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00669bc8bfcb8d7fa5d244f0819a79880bc778ed6aab7b68787c72b6f1709443\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98604715e78c851f959f0c229d5e36f68203dd7d1af5522d2c76c17ec168f95b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ced4687ba65f43cdb20daccd718c699d6a86a64138439b5f8fbe6bbc37b1ba3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2ec0d2b95278257555fc63611b54fc6c16f5abbb660c6dcd07c256495f67cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d2ec0d2b95278257555fc63611b54fc6c16f5abbb660c6dcd07c256495f67cf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:56Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.645273 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:56Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.656444 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b2cd44e037e42e82e40d3b3a2bc4e00c18804301aa567cf5bf5c7c25ef536be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:56Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.668863 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:56Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.684493 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:56Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.710456 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c0135f6-4074-4aab-9413-a8eb948cd566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee511e125da491001506ac0da29a1d7261d33f10
c886fa265a81698ce5d2d8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee511e125da491001506ac0da29a1d7261d33f10c886fa265a81698ce5d2d8e4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"message\\\":\\\"l\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0126 16:55:39.437634 6638 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI0126 16:55:39.437644 6638 obj_retry.go:303] Retry object setup: *v1.Pod openshift-dns/node-resolver-p7qpc\\\\nF0126 16:55:39.437650 6638 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:39Z is after 2025-08-24T17:21:41Z]\\\\nI0126 16:55:39.437664 6638 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0126 16:55:39.437585 6638 services_controller.go:451] Built service openshift-marketplace/certified-operator\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-44x2q_openshift-ovn-kubernetes(0c0135f6-4074-4aab-9413-a8eb948cd566)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-44x2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:56Z is after 2025-08-24T17:21:41Z"
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.727517 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.727568 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.727579 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.727598 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.727520 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5610d76387528c6d73e03566f9acf4b744b79f4a39119d35d23b8b2dcb385f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:56Z is after 2025-08-24T17:21:41Z" Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.727611 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:56Z","lastTransitionTime":"2026-01-26T16:55:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.747960 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44c06740a3b924e4b54afda160e7e790e2bec67ca8852b703887591802f029af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:56Z is after 2025-08-24T17:21:41Z"
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.830875 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.830933 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.830945 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.830970 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.830986 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:56Z","lastTransitionTime":"2026-01-26T16:55:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
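Every status patch in this stretch fails for the same reason: the serving certificate of the network-node-identity webhook at https://127.0.0.1:9743 expired on 2025-08-24T17:21:41Z while the node clock reads 2026-01-26, so Go's TLS verification rejects the handshake before the request is ever delivered. A minimal sketch of the same validity check against a PEM certificate (the path is hypothetical, not taken from this log):

    // certcheck.go - sketch: report whether a PEM certificate is currently
    // valid, mirroring the x509 expiry error seen in the entries above.
    package main

    import (
    	"crypto/x509"
    	"encoding/pem"
    	"fmt"
    	"log"
    	"os"
    	"time"
    )

    func main() {
    	// Hypothetical path; substitute the webhook's serving certificate.
    	data, err := os.ReadFile("/etc/webhook/tls.crt")
    	if err != nil {
    		log.Fatal(err)
    	}
    	block, _ := pem.Decode(data)
    	if block == nil {
    		log.Fatal("no PEM block found")
    	}
    	cert, err := x509.ParseCertificate(block.Bytes)
    	if err != nil {
    		log.Fatal(err)
    	}
    	now := time.Now()
    	switch {
    	case now.After(cert.NotAfter):
    		// The condition behind "certificate has expired or is not yet
    		// valid" in the kubelet log.
    		fmt.Printf("expired: current time %s is after %s\n",
    			now.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
    	case now.Before(cert.NotBefore):
    		fmt.Printf("not yet valid until %s\n", cert.NotBefore.Format(time.RFC3339))
    	default:
    		fmt.Println("certificate is within its validity window")
    	}
    }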
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.934506 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.934755 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.934765 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.934787 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:56 crc kubenswrapper[4865]: I0126 16:55:56.934799 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:56Z","lastTransitionTime":"2026-01-26T16:55:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:57 crc kubenswrapper[4865]: I0126 16:55:57.448770 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-13 08:44:58.487561097 +0000 UTC
Jan 26 16:55:58 crc kubenswrapper[4865]: I0126 16:55:58.174674 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:58 crc kubenswrapper[4865]: I0126 16:55:58.174723 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:58 crc kubenswrapper[4865]: I0126 16:55:58.174734 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:58 crc kubenswrapper[4865]: I0126 16:55:58.174756 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:58 crc kubenswrapper[4865]: I0126 16:55:58.174770 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:58Z","lastTransitionTime":"2026-01-26T16:55:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
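Each "Node became not ready" record above prints the Ready condition that the kubelet is about to write: type, status, heartbeat and transition timestamps, a machine-readable reason, and a human-readable message. A minimal sketch decoding one such condition, using a hand-rolled struct that mirrors the JSON field names in the log rather than the Kubernetes API types:

    // condition.go - sketch: decode a Ready condition in the shape logged
    // by setters.go above; field names follow the JSON in the log.
    package main

    import (
    	"encoding/json"
    	"fmt"
    	"log"
    )

    type NodeCondition struct {
    	Type               string `json:"type"`
    	Status             string `json:"status"`
    	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
    	LastTransitionTime string `json:"lastTransitionTime"`
    	Reason             string `json:"reason"`
    	Message            string `json:"message"`
    }

    func main() {
    	raw := `{"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:57Z","lastTransitionTime":"2026-01-26T16:55:57Z","reason":"KubeletNotReady","message":"container runtime network not ready"}`
    	var c NodeCondition
    	if err := json.Unmarshal([]byte(raw), &c); err != nil {
    		log.Fatal(err)
    	}
    	fmt.Printf("node Ready=%s reason=%s since=%s\n", c.Status, c.Reason, c.LastTransitionTime)
    }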
Jan 26 16:55:58 crc kubenswrapper[4865]: I0126 16:55:58.277823 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:58 crc kubenswrapper[4865]: I0126 16:55:58.277874 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:58 crc kubenswrapper[4865]: I0126 16:55:58.277887 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:58 crc kubenswrapper[4865]: I0126 16:55:58.277906 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:58 crc kubenswrapper[4865]: I0126 16:55:58.277916 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:58Z","lastTransitionTime":"2026-01-26T16:55:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:58 crc kubenswrapper[4865]: I0126 16:55:58.357695 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 26 16:55:58 crc kubenswrapper[4865]: I0126 16:55:58.357786 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 26 16:55:58 crc kubenswrapper[4865]: I0126 16:55:58.357786 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp"
Jan 26 16:55:58 crc kubenswrapper[4865]: I0126 16:55:58.357812 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 26 16:55:58 crc kubenswrapper[4865]: E0126 16:55:58.357903 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 26 16:55:58 crc kubenswrapper[4865]: E0126 16:55:58.358018 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9"
Jan 26 16:55:58 crc kubenswrapper[4865]: E0126 16:55:58.358071 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 26 16:55:58 crc kubenswrapper[4865]: E0126 16:55:58.358130 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 26 16:55:58 crc kubenswrapper[4865]: I0126 16:55:58.380486 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:58 crc kubenswrapper[4865]: I0126 16:55:58.380542 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:58 crc kubenswrapper[4865]: I0126 16:55:58.380551 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:58 crc kubenswrapper[4865]: I0126 16:55:58.380568 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:58 crc kubenswrapper[4865]: I0126 16:55:58.380578 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:58Z","lastTransitionTime":"2026-01-26T16:55:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:58 crc kubenswrapper[4865]: I0126 16:55:58.449199 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-30 16:48:29.343839831 +0000 UTC
Jan 26 16:55:58 crc kubenswrapper[4865]: I0126 16:55:58.483974 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:58 crc kubenswrapper[4865]: I0126 16:55:58.484031 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:58 crc kubenswrapper[4865]: I0126 16:55:58.484041 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:58 crc kubenswrapper[4865]: I0126 16:55:58.484058 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:58 crc kubenswrapper[4865]: I0126 16:55:58.484068 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:58Z","lastTransitionTime":"2026-01-26T16:55:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Has your network provider started?"} Jan 26 16:55:58 crc kubenswrapper[4865]: I0126 16:55:58.586616 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:58 crc kubenswrapper[4865]: I0126 16:55:58.586690 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:58 crc kubenswrapper[4865]: I0126 16:55:58.586700 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:58 crc kubenswrapper[4865]: I0126 16:55:58.586718 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:58 crc kubenswrapper[4865]: I0126 16:55:58.586733 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:58Z","lastTransitionTime":"2026-01-26T16:55:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:58 crc kubenswrapper[4865]: I0126 16:55:58.690691 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:58 crc kubenswrapper[4865]: I0126 16:55:58.690791 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:58 crc kubenswrapper[4865]: I0126 16:55:58.690822 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:58 crc kubenswrapper[4865]: I0126 16:55:58.690896 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:58 crc kubenswrapper[4865]: I0126 16:55:58.690932 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:58Z","lastTransitionTime":"2026-01-26T16:55:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:58 crc kubenswrapper[4865]: I0126 16:55:58.794189 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:58 crc kubenswrapper[4865]: I0126 16:55:58.794246 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:58 crc kubenswrapper[4865]: I0126 16:55:58.794262 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:58 crc kubenswrapper[4865]: I0126 16:55:58.794286 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:58 crc kubenswrapper[4865]: I0126 16:55:58.794302 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:58Z","lastTransitionTime":"2026-01-26T16:55:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:58 crc kubenswrapper[4865]: I0126 16:55:58.897107 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:58 crc kubenswrapper[4865]: I0126 16:55:58.897184 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:58 crc kubenswrapper[4865]: I0126 16:55:58.897198 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:58 crc kubenswrapper[4865]: I0126 16:55:58.897220 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:58 crc kubenswrapper[4865]: I0126 16:55:58.897234 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:58Z","lastTransitionTime":"2026-01-26T16:55:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.000489 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.000569 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.000581 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.000601 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.000616 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:59Z","lastTransitionTime":"2026-01-26T16:55:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.103863 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.103923 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.103936 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.103958 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.104019 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:59Z","lastTransitionTime":"2026-01-26T16:55:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.207198 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.207238 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.207247 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.207267 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.207277 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:59Z","lastTransitionTime":"2026-01-26T16:55:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.310662 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.310722 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.310732 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.310750 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.310766 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:59Z","lastTransitionTime":"2026-01-26T16:55:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.414413 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.414480 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.414495 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.414514 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.414532 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:59Z","lastTransitionTime":"2026-01-26T16:55:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.449642 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 22:54:51.676964768 +0000 UTC
Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.517752 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.517825 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.517836 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.517856 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.517868 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:59Z","lastTransitionTime":"2026-01-26T16:55:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.620843 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.620887 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.620897 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.620916 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.620927 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:59Z","lastTransitionTime":"2026-01-26T16:55:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.724402 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.724451 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.724460 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.724480 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.724492 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:59Z","lastTransitionTime":"2026-01-26T16:55:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.827909 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.827980 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.828028 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.828057 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.828080 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:59Z","lastTransitionTime":"2026-01-26T16:55:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.930962 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.931061 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.931081 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.931103 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:55:59 crc kubenswrapper[4865]: I0126 16:55:59.931125 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:55:59Z","lastTransitionTime":"2026-01-26T16:55:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.034780 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.034834 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.034845 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.034867 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.034880 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:00Z","lastTransitionTime":"2026-01-26T16:56:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.043155 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.043185 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.043197 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.043208 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.043216 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:00Z","lastTransitionTime":"2026-01-26T16:56:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
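Every status sync above lands on the same Ready=False condition: the container runtime reports NetworkReady=false because no CNI network config exists yet in /etc/kubernetes/cni/net.d/, and on a healthy node the cluster network plugin (apparently OVN-Kubernetes here, given the network-node-identity webhook in the errors below) writes that file shortly after it starts. A minimal Go sketch of the check the message describes, assuming only what the log states (the directory path comes from the log line; the extensions are the ones CNI config loaders conventionally accept, not kubelet's actual code path):

// Sketch: reproduce the kubelet's complaint by checking whether any CNI
// network config exists in the directory named in the log line. Purely
// illustrative; not kubelet source.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	confDir := "/etc/kubernetes/cni/net.d" // from the log message above
	var found []string
	// Assumption: .conf, .conflist and .json are the extensions the CNI
	// config loaders conventionally pick up.
	for _, pat := range []string{"*.conf", "*.conflist", "*.json"} {
		m, _ := filepath.Glob(filepath.Join(confDir, pat))
		found = append(found, m...)
	}
	if len(found) == 0 {
		fmt.Println("no CNI configuration file in", confDir, "- node stays NotReady")
		os.Exit(1)
	}
	fmt.Println("CNI config present:", found)
}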
Jan 26 16:56:00 crc kubenswrapper[4865]: E0126 16:56:00.063177 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ac1a731-046d-4d8f-8161-6e9e491f5dac\\\",\\\"systemUUID\\\":\\\"35d9e2a2-68c2-48cb-856f-00ba3eb74617\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:00Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.068688 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.068735 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.068765 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.068789 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.068802 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:00Z","lastTransitionTime":"2026-01-26T16:56:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:00 crc kubenswrapper[4865]: E0126 16:56:00.089876 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ac1a731-046d-4d8f-8161-6e9e491f5dac\\\",\\\"systemUUID\\\":\\\"35d9e2a2-68c2-48cb-856f-00ba3eb74617\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:00Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.095794 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.095862 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
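The status patch itself is well-formed; it is rejected before it reaches the Node object because the node.network-node-identity.openshift.io admission webhook cannot be called: its serving certificate expired on 2025-08-24, five months before the node's clock time of 2026-01-26. A minimal sketch to confirm the expiry directly against the endpoint named in the error (the address 127.0.0.1:9743 is from the log; InsecureSkipVerify is deliberate so the handshake survives long enough to read the expired certificate):

// Sketch: read the webhook's serving certificate and report its validity
// window. Diagnostic only; skips verification on purpose.
package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		fmt.Println("dial:", err)
		return
	}
	defer conn.Close()
	state := conn.ConnectionState()
	if len(state.PeerCertificates) == 0 {
		fmt.Println("no peer certificate presented")
		return
	}
	cert := state.PeerCertificates[0]
	fmt.Printf("subject=%s notBefore=%s notAfter=%s expired=%v\n",
		cert.Subject,
		cert.NotBefore.Format(time.RFC3339),
		cert.NotAfter.Format(time.RFC3339),
		time.Now().After(cert.NotAfter))
}

If openssl is available on the host, the equivalent one-liner is: openssl s_client -connect 127.0.0.1:9743 </dev/null 2>/dev/null | openssl x509 -noout -dates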
event="NodeHasNoDiskPressure" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.095876 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.095901 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.095915 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:00Z","lastTransitionTime":"2026-01-26T16:56:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:00 crc kubenswrapper[4865]: E0126 16:56:00.117491 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ac1a731-046d-4d8f-8161-6e9e491f5dac\\\",\\\"systemUUID\\\":\\\"35d9e2a2-68c2-48cb-856f-00ba3eb74617\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:00Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.123612 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.123653 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.123663 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.123683 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.123695 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:00Z","lastTransitionTime":"2026-01-26T16:56:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:00 crc kubenswrapper[4865]: E0126 16:56:00.140070 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ac1a731-046d-4d8f-8161-6e9e491f5dac\\\",\\\"systemUUID\\\":\\\"35d9e2a2-68c2-48cb-856f-00ba3eb74617\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:00Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.145631 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.145696 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
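The alternating info/error rhythm in this window is the kubelet's bounded retry: one node-status update cycle recomputes the conditions (the Recording event / Node became not ready lines), attempts the PATCH, logs "Error updating node status, will retry" on failure, and repeats a fixed number of times before giving up until the next sync. In upstream kubelet that limit is the constant nodeStatusUpdateRetry = 5, which matches the five consecutive E-lines stamped 16:56:00.063177 through 16:56:00.162085 here; treat the constant's name and value as an assumption about this particular build. A simplified sketch of the pattern:

// Sketch of the bounded retry loop visible in the log. The constant and
// the "exceeds retry count" wording follow upstream kubelet; everything
// else is a stand-in.
package main

import (
	"errors"
	"fmt"
)

const nodeStatusUpdateRetry = 5 // assumed from upstream kubelet

func tryUpdateNodeStatus() error {
	// Stand-in for the status PATCH the webhook keeps rejecting.
	return errors.New("x509: certificate has expired or is not yet valid")
}

func updateNodeStatus() error {
	for i := 0; i < nodeStatusUpdateRetry; i++ {
		if err := tryUpdateNodeStatus(); err != nil {
			fmt.Printf("Error updating node status, will retry: %v\n", err)
			continue
		}
		return nil
	}
	return fmt.Errorf("update node status exceeds retry count")
}

func main() {
	if err := updateNodeStatus(); err != nil {
		fmt.Println(err)
	}
}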
event="NodeHasNoDiskPressure" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.145715 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.145737 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.145754 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:00Z","lastTransitionTime":"2026-01-26T16:56:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:00 crc kubenswrapper[4865]: E0126 16:56:00.162085 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:00Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:00Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:00Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:00Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ac1a731-046d-4d8f-8161-6e9e491f5dac\\\",\\\"systemUUID\\\":\\\"35d9e2a2-68c2-48cb-856f-00ba3eb74617\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:00Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:00 crc kubenswrapper[4865]: E0126 16:56:00.162240 4865 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.164818 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.164877 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.164895 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.164917 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.164932 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:00Z","lastTransitionTime":"2026-01-26T16:56:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.268052 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.268105 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.268125 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.268153 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.268169 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:00Z","lastTransitionTime":"2026-01-26T16:56:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.357221 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.357221 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.357213 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.357242 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:56:00 crc kubenswrapper[4865]: E0126 16:56:00.357381 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:56:00 crc kubenswrapper[4865]: E0126 16:56:00.357672 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:56:00 crc kubenswrapper[4865]: E0126 16:56:00.357724 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:56:00 crc kubenswrapper[4865]: E0126 16:56:00.357804 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.370857 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.370928 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.370947 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.370971 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.370987 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:00Z","lastTransitionTime":"2026-01-26T16:56:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.450431 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-06 11:32:58.344321977 +0000 UTC Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.475015 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.475094 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.475113 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.475140 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.475161 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:00Z","lastTransitionTime":"2026-01-26T16:56:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.577856 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.577904 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.577917 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.577939 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.577953 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:00Z","lastTransitionTime":"2026-01-26T16:56:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.680876 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.680942 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.680954 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.680977 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.681011 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:00Z","lastTransitionTime":"2026-01-26T16:56:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.783515 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.783602 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.783612 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.783631 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.783643 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:00Z","lastTransitionTime":"2026-01-26T16:56:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.886529 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.886620 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.886632 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.886655 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.886673 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:00Z","lastTransitionTime":"2026-01-26T16:56:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.991301 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.991370 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.991382 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.991402 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:00 crc kubenswrapper[4865]: I0126 16:56:00.991416 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:00Z","lastTransitionTime":"2026-01-26T16:56:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:01 crc kubenswrapper[4865]: I0126 16:56:01.094441 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:01 crc kubenswrapper[4865]: I0126 16:56:01.094497 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:01 crc kubenswrapper[4865]: I0126 16:56:01.094508 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:01 crc kubenswrapper[4865]: I0126 16:56:01.094527 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:01 crc kubenswrapper[4865]: I0126 16:56:01.094541 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:01Z","lastTransitionTime":"2026-01-26T16:56:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:01 crc kubenswrapper[4865]: I0126 16:56:01.198415 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:01 crc kubenswrapper[4865]: I0126 16:56:01.198510 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:01 crc kubenswrapper[4865]: I0126 16:56:01.198530 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:01 crc kubenswrapper[4865]: I0126 16:56:01.198563 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:01 crc kubenswrapper[4865]: I0126 16:56:01.198595 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:01Z","lastTransitionTime":"2026-01-26T16:56:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:01 crc kubenswrapper[4865]: I0126 16:56:01.301856 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:01 crc kubenswrapper[4865]: I0126 16:56:01.301909 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:01 crc kubenswrapper[4865]: I0126 16:56:01.301922 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:01 crc kubenswrapper[4865]: I0126 16:56:01.301941 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:01 crc kubenswrapper[4865]: I0126 16:56:01.301955 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:01Z","lastTransitionTime":"2026-01-26T16:56:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:01 crc kubenswrapper[4865]: I0126 16:56:01.405549 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:01 crc kubenswrapper[4865]: I0126 16:56:01.405613 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:01 crc kubenswrapper[4865]: I0126 16:56:01.405625 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:01 crc kubenswrapper[4865]: I0126 16:56:01.405645 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:01 crc kubenswrapper[4865]: I0126 16:56:01.405657 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:01Z","lastTransitionTime":"2026-01-26T16:56:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:01 crc kubenswrapper[4865]: I0126 16:56:01.451068 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 10:09:31.575455347 +0000 UTC Jan 26 16:56:01 crc kubenswrapper[4865]: I0126 16:56:01.508178 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:01 crc kubenswrapper[4865]: I0126 16:56:01.508209 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:01 crc kubenswrapper[4865]: I0126 16:56:01.508218 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:01 crc kubenswrapper[4865]: I0126 16:56:01.508234 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:01 crc kubenswrapper[4865]: I0126 16:56:01.508245 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:01Z","lastTransitionTime":"2026-01-26T16:56:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:01 crc kubenswrapper[4865]: I0126 16:56:01.611039 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:01 crc kubenswrapper[4865]: I0126 16:56:01.611122 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:01 crc kubenswrapper[4865]: I0126 16:56:01.611143 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:01 crc kubenswrapper[4865]: I0126 16:56:01.611166 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:01 crc kubenswrapper[4865]: I0126 16:56:01.611179 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:01Z","lastTransitionTime":"2026-01-26T16:56:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:01 crc kubenswrapper[4865]: I0126 16:56:01.714136 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:01 crc kubenswrapper[4865]: I0126 16:56:01.714200 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:01 crc kubenswrapper[4865]: I0126 16:56:01.714212 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:01 crc kubenswrapper[4865]: I0126 16:56:01.714249 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:01 crc kubenswrapper[4865]: I0126 16:56:01.714261 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:01Z","lastTransitionTime":"2026-01-26T16:56:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:01 crc kubenswrapper[4865]: I0126 16:56:01.818018 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:01 crc kubenswrapper[4865]: I0126 16:56:01.818071 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:01 crc kubenswrapper[4865]: I0126 16:56:01.818082 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:01 crc kubenswrapper[4865]: I0126 16:56:01.818102 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:01 crc kubenswrapper[4865]: I0126 16:56:01.818116 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:01Z","lastTransitionTime":"2026-01-26T16:56:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:01 crc kubenswrapper[4865]: I0126 16:56:01.921984 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:01 crc kubenswrapper[4865]: I0126 16:56:01.922055 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:01 crc kubenswrapper[4865]: I0126 16:56:01.922068 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:01 crc kubenswrapper[4865]: I0126 16:56:01.922095 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:01 crc kubenswrapper[4865]: I0126 16:56:01.922109 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:01Z","lastTransitionTime":"2026-01-26T16:56:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.024638 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.024696 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.024711 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.024732 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.024746 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:02Z","lastTransitionTime":"2026-01-26T16:56:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.128699 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.128739 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.128750 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.128770 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.128781 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:02Z","lastTransitionTime":"2026-01-26T16:56:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.232127 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.232172 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.232184 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.232199 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.232209 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:02Z","lastTransitionTime":"2026-01-26T16:56:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.335051 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.335112 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.335124 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.335145 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.335156 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:02Z","lastTransitionTime":"2026-01-26T16:56:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.357701 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.357747 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.357795 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:56:02 crc kubenswrapper[4865]: E0126 16:56:02.357877 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.358055 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:56:02 crc kubenswrapper[4865]: E0126 16:56:02.358149 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9" Jan 26 16:56:02 crc kubenswrapper[4865]: E0126 16:56:02.358227 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:56:02 crc kubenswrapper[4865]: E0126 16:56:02.358175 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.437733 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.437865 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.437878 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.437899 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.437913 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:02Z","lastTransitionTime":"2026-01-26T16:56:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.451765 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-06 01:50:34.262087079 +0000 UTC Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.541085 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.541128 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.541138 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.541155 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.541165 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:02Z","lastTransitionTime":"2026-01-26T16:56:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.644341 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.644425 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.644445 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.644497 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.644516 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:02Z","lastTransitionTime":"2026-01-26T16:56:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.746868 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.746934 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.746946 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.746970 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.746982 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:02Z","lastTransitionTime":"2026-01-26T16:56:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.849984 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.850039 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.850048 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.850065 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.850076 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:02Z","lastTransitionTime":"2026-01-26T16:56:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.956543 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.956587 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.956598 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.956620 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:02 crc kubenswrapper[4865]: I0126 16:56:02.956632 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:02Z","lastTransitionTime":"2026-01-26T16:56:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.060068 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.060143 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.060159 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.060178 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.060189 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:03Z","lastTransitionTime":"2026-01-26T16:56:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.163249 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.163288 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.163298 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.163321 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.163332 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:03Z","lastTransitionTime":"2026-01-26T16:56:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.266178 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.266233 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.266245 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.266285 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.266298 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:03Z","lastTransitionTime":"2026-01-26T16:56:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.369354 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.371849 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.371904 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.371917 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.371937 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.372308 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:03Z","lastTransitionTime":"2026-01-26T16:56:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.452563 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-20 08:12:11.599662502 +0000 UTC Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.476579 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.476635 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.476646 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.476666 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.476680 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:03Z","lastTransitionTime":"2026-01-26T16:56:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.580206 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.580329 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.580385 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.580413 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.580437 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:03Z","lastTransitionTime":"2026-01-26T16:56:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.683632 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.683687 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.683697 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.683723 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.683734 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:03Z","lastTransitionTime":"2026-01-26T16:56:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.787004 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.787044 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.787052 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.787069 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.787079 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:03Z","lastTransitionTime":"2026-01-26T16:56:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.889873 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.889948 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.889986 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.890037 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.890076 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:03Z","lastTransitionTime":"2026-01-26T16:56:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.993621 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.993692 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.993705 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.993726 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:03 crc kubenswrapper[4865]: I0126 16:56:03.993757 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:03Z","lastTransitionTime":"2026-01-26T16:56:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.098438 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.098488 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.098512 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.098533 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.098546 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:04Z","lastTransitionTime":"2026-01-26T16:56:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.202194 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.202259 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.202270 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.202289 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.202304 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:04Z","lastTransitionTime":"2026-01-26T16:56:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.305612 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.305672 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.305684 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.305706 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.305720 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:04Z","lastTransitionTime":"2026-01-26T16:56:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.357460 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.357507 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.357510 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.357638 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:56:04 crc kubenswrapper[4865]: E0126 16:56:04.357637 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9" Jan 26 16:56:04 crc kubenswrapper[4865]: E0126 16:56:04.357767 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:56:04 crc kubenswrapper[4865]: E0126 16:56:04.357836 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:56:04 crc kubenswrapper[4865]: E0126 16:56:04.357900 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.376365 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:04Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.395918 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:04Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.408369 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.408426 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.408435 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.408453 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.408466 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:04Z","lastTransitionTime":"2026-01-26T16:56:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.419244 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c0135f6-4074-4aab-9413-a8eb948cd566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ee511e125da491001506ac0da29a1d7261d33f10c886fa265a81698ce5d2d8e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee511e125da491001506ac0da29a1d7261d33f10c886fa265a81698ce5d2d8e4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"message\\\":\\\"l\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0126 16:55:39.437634 6638 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI0126 16:55:39.437644 6638 obj_retry.go:303] Retry object setup: *v1.Pod openshift-dns/node-resolver-p7qpc\\\\nF0126 16:55:39.437650 6638 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:39Z is after 2025-08-24T17:21:41Z]\\\\nI0126 16:55:39.437664 6638 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0126 16:55:39.437585 6638 services_controller.go:451] Built service openshift-marketplace/certified-operator\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-44x2q_openshift-ovn-kubernetes(0c0135f6-4074-4aab-9413-a8eb948cd566)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-44x2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:04Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.435189 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5610d76387528c6d73e03566f9acf4b744b79f4a39119d35d23b8b2dcb385f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 
16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:04Z is after 2025-08-24T17:21:41Z" 
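Every "Failed to update status for pod" entry above shares one root cause: the pod.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 presents a serving certificate that expired 2025-08-24T17:21:41Z, while the node clock reads 2026-01-26. A minimal Go sketch of the same crypto/x509 validity check that fails during the kubelet's TLS handshake; the PEM path below is a placeholder, not the webhook's actual cert mount path:

```go
// Sketch: reproduce the x509 validity check behind the
// "certificate has expired or is not yet valid" errors above.
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"log"
	"os"
	"time"
)

func main() {
	data, err := os.ReadFile("webhook-cert.pem") // hypothetical path
	if err != nil {
		log.Fatal(err)
	}
	block, _ := pem.Decode(data)
	if block == nil {
		log.Fatal("no PEM block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		log.Fatal(err)
	}
	now := time.Now()
	fmt.Printf("NotBefore=%s NotAfter=%s\n", cert.NotBefore, cert.NotAfter)
	if now.After(cert.NotAfter) {
		// Same shape as the kubelet error: "current time ... is after ..."
		fmt.Printf("expired: current time %s is after %s\n",
			now.UTC().Format(time.RFC3339),
			cert.NotAfter.UTC().Format(time.RFC3339))
	}
}
```

Run against the webhook's tls.crt, a NotAfter of 2025-08-24T17:21:41Z would confirm the errors logged here are the expected TLS-verification outcome rather than a clock problem.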
Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.452713 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 20:46:40.460291036 +0000 UTC Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.452828 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c52621ee-c29b-488b-a3bc-1614bcc37576\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97bf00e18f645318dd33d3ca36e9eda72e760adf20b3b28332628932c8e5b38b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00c85dd79fa2ffbae8e6a95d729ed74719c871cdfb91f69a761ae95cec2055b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c85dd79fa2ffbae8e6a95d729ed74719c871cdfb91f69a761ae95cec2055b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:04Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.471333 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44c06740a3b924e4b54afda160e7e790e2bec67ca8852b703887591802f029af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:04Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.492841 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bccdd94a-94b4-4a16-95dd-c375a34f754f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ccd48f832c010ac33cb2001b2e71aac437d4366744c03a232ff9d8dd4acea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fsw2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:04Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.510529 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e58c71a-f1f1-421b-8e21-53ef9b51b937\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c1c8845e13a8d4b226f13897c388c5e07b2e9442c182930b42a88b2ed7b299b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a953b0ed4474ae87ed2f4658deb39951e319c1675872e49ea28b634d3c37b8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1160d459f3a91359793f3431bd62257ad71f9446c935f43b5265443946d3e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:04Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.511646 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.511704 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.511720 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.511757 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.511776 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:04Z","lastTransitionTime":"2026-01-26T16:56:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.525571 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p7qpc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"000aa434-a8c8-4051-88f4-c50d48ce851b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4d967fccbef000c343246b0f7d1cbdd98592d6dc499d444472c20588a6e6697\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m7gvt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p7qpc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:04Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.539858 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:04Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.552416 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-45gx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f950c5c3-b446-44cf-9de5-dd7ff03b615f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10c92950abd6cddc41b262bd7e06b74fa47a01c012edd0bec0f5260b992ee70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwt2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-45gx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:04Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.567211 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z64kc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ddaae5-b604-4352-b756-3a4ee374b6e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b554efe88a77e7cf03fc70023bc8663bc926af4a8a95d657d5073fcc44c9f17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4131a32bf9f6e8aa00c8250af3d01f8768e1db9c93b7a25b925580f0f337c2c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z64kc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:04Z is after 2025-08-24T17:21:41Z" Jan 26 
16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.582326 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ddedab5-2528-4881-9251-9ba5334aea61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9a5f5b650bd1b1e2875d8767634d7ac74fd1c90ea76f84e65f5e5078828be62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79930d9ec5ae2071a02e2417508458bd2f1b5693205ae0ad3ee46c5a3a31493b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q8cb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2026-01-26T16:56:04Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.599161 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bz29j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d80fdc9f189133778add9127ee70dbdd4ca23ed09cf047218ed4ce9e4b07ce36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82ddf2354d7df028aad27125fddbfccd87910a5407d7d63ff39865dd9cedcaa7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T16:55:54Z\\\",\\\"message\\\":\\\"2026-01-26T16:55:08+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c36d9952-7cfc-4ebd-954c-df5db2c91d07\\\\n2026-01-26T16:55:08+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c36d9952-7cfc-4ebd-954c-df5db2c91d07 to /host/opt/cni/bin/\\\\n2026-01-26T16:55:09Z [verbose] multus-daemon started\\\\n2026-01-26T16:55:09Z [verbose] Readiness Indicator file check\\\\n2026-01-26T16:55:54Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-79bqr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bz29j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:04Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.614105 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-wx7wp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ebe888-610a-47c4-b256-3ddbf03f83b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with 
unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:18Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-wx7wp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:04Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.614438 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.614470 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.614481 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.614497 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.614508 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:04Z","lastTransitionTime":"2026-01-26T16:56:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.627044 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83ffee97-9e36-462b-b020-39fcf1c33c00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00669bc8bfcb8d7fa5d244f0819a79880bc778ed6aab7b68787c72b6f1709443\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98604715e78c851f959f0c229d5e36f68203dd7d1af5522d2c76c17ec168f95b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ced4687ba65f43cdb20daccd718c699d6a86a64138439b5f8fbe6bbc37b1ba3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2ec0d2b95278257555fc63611b54fc6c16f5abbb660c6dcd07c256495f67cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d2ec0d2b95278257555fc63611b54fc6c16f5abbb660c6dcd07c256495f67cf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:04Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.641416 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:04Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.654923 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b2cd44e037e42e82e40d3b3a2bc4e00c18804301aa567cf5bf5c7c25ef536be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:04Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.717636 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.717698 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.717713 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.717741 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.717756 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:04Z","lastTransitionTime":"2026-01-26T16:56:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.821338 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.821381 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.821391 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.821407 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.821419 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:04Z","lastTransitionTime":"2026-01-26T16:56:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.924190 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.924243 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.924252 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.924270 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:04 crc kubenswrapper[4865]: I0126 16:56:04.924284 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:04Z","lastTransitionTime":"2026-01-26T16:56:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.027872 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.027926 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.027938 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.027958 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.027974 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:05Z","lastTransitionTime":"2026-01-26T16:56:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.130643 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.130709 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.130726 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.130755 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.130774 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:05Z","lastTransitionTime":"2026-01-26T16:56:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.234305 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.234573 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.235207 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.235390 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.235412 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:05Z","lastTransitionTime":"2026-01-26T16:56:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.339652 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.339778 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.339796 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.339820 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.339836 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:05Z","lastTransitionTime":"2026-01-26T16:56:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.358041 4865 scope.go:117] "RemoveContainer" containerID="ee511e125da491001506ac0da29a1d7261d33f10c886fa265a81698ce5d2d8e4" Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.443172 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.443223 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.443233 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.443252 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.443264 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:05Z","lastTransitionTime":"2026-01-26T16:56:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.453487 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-12 19:04:47.61539621 +0000 UTC Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.545787 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.545826 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.545838 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.545859 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.545873 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:05Z","lastTransitionTime":"2026-01-26T16:56:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.649314 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.649372 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.649389 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.649412 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.649432 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:05Z","lastTransitionTime":"2026-01-26T16:56:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.758360 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.758460 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.758473 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.758516 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.758529 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:05Z","lastTransitionTime":"2026-01-26T16:56:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.862032 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.862079 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.862130 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.862154 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.862166 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:05Z","lastTransitionTime":"2026-01-26T16:56:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.966443 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.966523 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.966537 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.966559 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:05 crc kubenswrapper[4865]: I0126 16:56:05.966575 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:05Z","lastTransitionTime":"2026-01-26T16:56:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.075452 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.075504 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.075516 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.075535 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.075548 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:06Z","lastTransitionTime":"2026-01-26T16:56:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.179280 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.179328 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.179340 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.179360 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.179375 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:06Z","lastTransitionTime":"2026-01-26T16:56:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.256545 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:56:06 crc kubenswrapper[4865]: E0126 16:56:06.256817 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:10.256758237 +0000 UTC m=+157.840643824 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.283311 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.283393 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.283413 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.283437 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.283452 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:06Z","lastTransitionTime":"2026-01-26T16:56:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.357146 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.357259 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:56:06 crc kubenswrapper[4865]: E0126 16:56:06.357340 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.357376 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.357355 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:56:06 crc kubenswrapper[4865]: E0126 16:56:06.357516 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
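Annotation: the UnmountVolume failure above is not an error returned by the CSI driver itself; the kubelet cannot even build a client, because kubevirt.io.hostpath-provisioner is absent from its registry of CSI drivers. That registry is only re-populated when the driver's node plugin re-registers, which is typical right after a kubelet restart. A map-backed illustration of that lookup, not the kubelet's actual types:

    package main

    import "fmt"

    type csiClient struct{ endpoint string }

    // registeredDrivers is empty right after a restart, until each node plugin
    // re-registers itself over the plugin-registration socket.
    var registeredDrivers = map[string]csiClient{}

    func newCSIClient(driver string) (csiClient, error) {
        c, ok := registeredDrivers[driver]
        if !ok {
            return csiClient{}, fmt.Errorf(
                "driver name %s not found in the list of registered CSI drivers", driver)
        }
        return c, nil
    }

    func main() {
        if _, err := newCSIClient("kubevirt.io.hostpath-provisioner"); err != nil {
            fmt.Println("Unmounter.TearDownAt failed to get CSI client:", err)
        }
    }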
pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9" Jan 26 16:56:06 crc kubenswrapper[4865]: E0126 16:56:06.357643 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:56:06 crc kubenswrapper[4865]: E0126 16:56:06.357757 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.357800 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.357860 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.357891 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.358127 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:56:06 crc kubenswrapper[4865]: E0126 16:56:06.358173 4865 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 26 16:56:06 crc kubenswrapper[4865]: E0126 16:56:06.358178 4865 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 26 16:56:06 crc kubenswrapper[4865]: E0126 16:56:06.358235 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 
nodeName:}" failed. No retries permitted until 2026-01-26 16:57:10.358214202 +0000 UTC m=+157.942099779 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 26 16:56:06 crc kubenswrapper[4865]: E0126 16:56:06.358298 4865 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 26 16:56:06 crc kubenswrapper[4865]: E0126 16:56:06.358321 4865 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 26 16:56:06 crc kubenswrapper[4865]: E0126 16:56:06.358336 4865 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 16:56:06 crc kubenswrapper[4865]: E0126 16:56:06.358334 4865 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 26 16:56:06 crc kubenswrapper[4865]: E0126 16:56:06.358385 4865 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 26 16:56:06 crc kubenswrapper[4865]: E0126 16:56:06.358413 4865 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 16:56:06 crc kubenswrapper[4865]: E0126 16:56:06.358306 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 16:57:10.358295714 +0000 UTC m=+157.942181301 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 26 16:56:06 crc kubenswrapper[4865]: E0126 16:56:06.358497 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-26 16:57:10.358468989 +0000 UTC m=+157.942354766 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 16:56:06 crc kubenswrapper[4865]: E0126 16:56:06.358527 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-26 16:57:10.35851852 +0000 UTC m=+157.942404107 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.376602 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.386595 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.386636 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.386647 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.386665 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.386679 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:06Z","lastTransitionTime":"2026-01-26T16:56:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
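Annotation: the kube-api-access-* volumes above are projected volumes; each one assembles the service-account token together with the kube-root-ca.crt and openshift-service-ca.crt ConfigMaps, so setup fails while either ConfigMap is still unregistered, and the failures surface as a list. A sketch of that error aggregation, with the ConfigMap lookup stubbed out:

    package main

    import (
        "errors"
        "fmt"
    )

    var registered = map[string]bool{} // empty: nothing registered yet

    func getConfigMap(ns, name string) error {
        if !registered[ns+"/"+name] {
            return fmt.Errorf("object %q/%q not registered", ns, name)
        }
        return nil
    }

    // prepareProjectedVolume checks every source and joins the failures, which
    // is why the log reports a bracketed list of errors per volume.
    func prepareProjectedVolume(ns string, sources ...string) error {
        var errs []error
        for _, s := range sources {
            if err := getConfigMap(ns, s); err != nil {
                errs = append(errs, err)
            }
        }
        return errors.Join(errs...)
    }

    func main() {
        err := prepareProjectedVolume("openshift-network-diagnostics",
            "kube-root-ca.crt", "openshift-service-ca.crt")
        fmt.Println("Error preparing data for projected volume:", err)
    }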
Has your network provider started?"} Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.453910 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-12 08:57:34.69722691 +0000 UTC Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.489944 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.489978 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.490005 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.490025 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.490038 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:06Z","lastTransitionTime":"2026-01-26T16:56:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.525527 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-44x2q_0c0135f6-4074-4aab-9413-a8eb948cd566/ovnkube-controller/2.log" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.529150 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" event={"ID":"0c0135f6-4074-4aab-9413-a8eb948cd566","Type":"ContainerStarted","Data":"0b1bdaec7b6fb09cece76cbaadc553059d80d03532328d814747fd917b452d7f"} Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.529675 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.546540 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bz29j" err="failed to patch status 
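Annotation: the certificate_manager entry above reports a rotation deadline months before the certificate's expiry. Certificate managers in the client-go style pick that deadline at a jittered fraction of the certificate's lifetime; the 70-90% window below is an assumption, as is the one-year NotBefore. The point of the sketch is that a deadline already in the past, as in the log, means rotation should begin immediately:

    package main

    import (
        "fmt"
        "math/rand"
        "time"
    )

    // rotationDeadline picks a uniform point in [0.7, 0.9] of the certificate's
    // validity interval (assumed policy, for illustration).
    func rotationDeadline(notBefore, notAfter time.Time) time.Time {
        total := notAfter.Sub(notBefore)
        jittered := time.Duration(float64(total) * (0.7 + 0.2*rand.Float64()))
        return notBefore.Add(jittered)
    }

    func main() {
        notBefore := time.Date(2025, 2, 24, 5, 53, 3, 0, time.UTC) // assumed issue time
        notAfter := time.Date(2026, 2, 24, 5, 53, 3, 0, time.UTC)  // expiry from the log
        deadline := rotationDeadline(notBefore, notAfter)
        fmt.Println("rotation deadline:", deadline)
        if time.Now().After(deadline) {
            fmt.Println("deadline passed: rotate the serving certificate now")
        }
    }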
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d80fdc9f189133778add9127ee70dbdd4ca23ed09cf047218ed4ce9e4b07ce36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82ddf2354d7df028aad27125fddbfccd87910a5407d7d63ff39865dd9cedcaa7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T16:55:54Z\\\",\\\"message\\\":\\\"2026-01-26T16:55:08+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c36d9952-7cfc-4ebd-954c-df5db2c91d07\\\\n2026-01-26T16:55:08+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c36d9952-7cfc-4ebd-954c-df5db2c91d07 to /host/opt/cni/bin/\\\\n2026-01-26T16:55:09Z [verbose] multus-daemon started\\\\n2026-01-26T16:55:09Z [verbose] Readiness Indicator file check\\\\n2026-01-26T16:55:54Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-79bqr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bz29j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.559880 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-wx7wp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ebe888-610a-47c4-b256-3ddbf03f83b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with 
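Annotation: the kube-multus termination message embedded above ("Readiness Indicator file check ... pollimmediate error: timed out waiting for the condition") describes a bounded poll: multus waits for the default network's delegate config, /host/run/multus/cni/net.d/10-ovn-kubernetes.conf, and exits 1 when the wait times out, which is what produced the restartCount of 1. A sketch of such a poll loop, with the interval and timeout assumed:

    package main

    import (
        "errors"
        "fmt"
        "os"
        "time"
    )

    // waitForFile polls for path until it exists or the timeout elapses.
    func waitForFile(path string, interval, timeout time.Duration) error {
        deadline := time.Now().Add(timeout)
        for {
            if _, err := os.Stat(path); err == nil {
                return nil
            }
            if time.Now().After(deadline) {
                return errors.New("timed out waiting for the condition")
            }
            time.Sleep(interval)
        }
    }

    func main() {
        err := waitForFile("/host/run/multus/cni/net.d/10-ovn-kubernetes.conf",
            time.Second, 45*time.Second)
        if err != nil {
            fmt.Println("have you checked that your default network is ready?", err)
        }
    }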
unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:18Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-wx7wp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.576497 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83ffee97-9e36-462b-b020-39fcf1c33c00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00669bc8bfcb8d7fa5d244f0819a79880bc778ed6aab7b68787c72b6f1709443\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98604715e78c851f959f0c229d5e36f68203dd7d1af5522d2c76c17ec168f95b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ced4687ba65f43cdb20daccd718c699d6a86a64138439b5f8fbe6bbc37b1ba3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2ec0d2b95278257555fc63611b54fc6c16f5abbb660c6dcd07c256495f67cf\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d2ec0d2b95278257555fc63611b54fc6c16f5abbb660c6dcd07c256495f67cf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.592376 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.594748 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.594811 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.594821 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.594840 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.594855 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:06Z","lastTransitionTime":"2026-01-26T16:56:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.610525 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b2cd44e037e42e82e40d3b3a2bc4e00c18804301aa567cf5bf5c7c25ef536be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.640657 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ddedab5-2528-4881-9251-9ba5334aea61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9a5f5b650bd1b1e2875d8767634d7ac74fd1c90ea76f84e65f5e5078828be62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79930d9ec5ae2071a02e2417508458bd2f1b5693205ae0ad3ee46c5a3a31493b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q8cb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.659717 4865 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.682519 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c0135f6-4074-4aab-9413-a8eb948cd566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b1bdaec7b6fb09cece76cbaadc553059d80d03532328d814747fd917b452d7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee511e125da491001506ac0da29a1d7261d33f10c886fa265a81698ce5d2d8e4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"message\\\":\\\"l\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0126 16:55:39.437634 6638 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI0126 16:55:39.437644 6638 obj_retry.go:303] Retry object setup: *v1.Pod openshift-dns/node-resolver-p7qpc\\\\nF0126 16:55:39.437650 6638 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:39Z is after 2025-08-24T17:21:41Z]\\\\nI0126 16:55:39.437664 6638 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0126 16:55:39.437585 6638 services_controller.go:451] Built service 
openshift-marketplace/certified-operator\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:56:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\
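Annotation: every one of these patch attempts dies the same way. The node-identity webhook at 127.0.0.1:9743 serves a certificate whose NotAfter (2025-08-24T17:21:41Z) is months behind the node's current time, and the ovnkube-controller fatal embedded above shows the same expired certificate killing the node-annotation step of CNI setup itself, which is why the network never becomes ready. TLS verification here is a validity-window comparison; a stand-in with the times copied from the log and an assumed NotBefore:

    package main

    import (
        "fmt"
        "time"
    )

    // checkValidity mirrors the window test that produces the x509 error text
    // seen throughout this log (this is an illustration, not crypto/x509 itself).
    func checkValidity(now, notBefore, notAfter time.Time) error {
        if now.Before(notBefore) || now.After(notAfter) {
            return fmt.Errorf("x509: certificate has expired or is not yet valid: "+
                "current time %s is after %s",
                now.Format(time.RFC3339), notAfter.Format(time.RFC3339))
        }
        return nil
    }

    func main() {
        now := time.Date(2026, 1, 26, 16, 56, 6, 0, time.UTC)       // from the log
        notAfter := time.Date(2025, 8, 24, 17, 21, 41, 0, time.UTC) // from the log
        notBefore := notAfter.AddDate(-1, 0, 0)                     // assumed issue date
        fmt.Println(checkValidity(now, notBefore, notAfter))
    }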
\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-44x2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.698205 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.698256 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.698280 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.698303 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.698315 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:06Z","lastTransitionTime":"2026-01-26T16:56:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.708730 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5610d76387528c6d73e03566f9acf4b744b79f4a39119d35d23b8b2dcb385f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.723769 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c52621ee-c29b-488b-a3bc-1614bcc37576\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97bf00e18f645318dd33d3ca36e9eda72e760adf20b3b28332628932c8e5b38b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00c85dd79fa2ffbae8e6a95d729ed74719c871cdfb91f69a761ae95cec2055b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c85dd79fa2ffbae8e6a95d729ed74719c871cdfb91f69a761ae95cec2055b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.786236 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44c06740a3b924e4b54afda160e7e790e2bec67ca8852b703887591802f029af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.801772 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.801876 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.801898 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.801950 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.801968 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:06Z","lastTransitionTime":"2026-01-26T16:56:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.802824 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.834936 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2e47e1f3-e364-439b-b53a-1be7c7855c0b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc69fe50939a952f84122d3cad7f10be6af95339fcd4acd4fac04c8a016dbbe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36f7de44402f8a7aad2517680def4ccb193db48ab21336b1b6bc14f00c680744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://538a83657298df60ad66384b8292cedbf580a5e2919d078a6137d110ed389707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aeed8d07b9e87fec731b2b36308602dd19790fc
9218fe6fc0667127100916835\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcfa6a28052cef728edc32e435c8e49560aca61dcdd9a1aee7fb96762c597434\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62a0ad6430e07209cecff40860c0e36cb297f37d4f0230f89890b637009538b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62a0ad6430e07209cecff40860c0e36cb297f37d4f0230f89890b637009538b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2a5a56bc9d7995aaedd0e9dac5e9755a5b145cf9e663df946b9fa8139174e4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2a5a56bc9d7995aaedd0e9dac5e9755a5b145cf9e663df946b9fa8139174e4b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ef21dd98ef762ec4d9fa31722e4cc551c00be7c0210f4e9f1415a2d8672c51b8\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef21dd98ef762ec4d9fa31722e4cc551c00be7c0210f4e9f1415a2d8672c51b8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.851315 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e58c71a-f1f1-421b-8e21-53ef9b51b937\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c1c8845e13a8d4b226f13897c388c5e07b2e9442c182930b42a88b2ed7b299b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a953b0ed4474ae87ed2f4658deb39951e319c1675872e49ea28b634d3c37b8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1160d459f3a91359793f3431bd62257ad71f9446c935f43b5265443946d3e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.865355 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p7qpc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"000aa434-a8c8-4051-88f4-c50d48ce851b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4d967fccbef000c343246b0f7d1cbdd98592d6dc499d444472c20588a6e6697\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m7gvt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p7qpc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.885676 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bccdd94a-94b4-4a16-95dd-c375a34f754f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ccd48f832c010ac33cb2001b2e71aac437d4366744c03a232ff9d8dd4acea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fsw2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.898816 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.910823 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-45gx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f950c5c3-b446-44cf-9de5-dd7ff03b615f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10c92950abd6cddc41b262bd7e06b74fa47a01c012edd0bec0f5260b992ee70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwt2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-45gx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.911927 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.911983 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.912024 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.912042 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.912056 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:06Z","lastTransitionTime":"2026-01-26T16:56:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:06 crc kubenswrapper[4865]: I0126 16:56:06.921417 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z64kc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ddaae5-b604-4352-b756-3a4ee374b6e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b554efe88a77e7cf03fc70023bc8663bc926af4a8a95d657d5073fcc44c9f17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4131a32bf9f6e8aa00c8250af3d01f8768e1db9c93b7a25b925580f0f337c2c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:17Z\\\"}}\" 
for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z64kc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:06Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.014560 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.014613 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.014627 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.014644 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.014655 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:07Z","lastTransitionTime":"2026-01-26T16:56:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.122067 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.122115 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.122125 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.122143 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.122154 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:07Z","lastTransitionTime":"2026-01-26T16:56:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.224568 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.224620 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.224632 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.224650 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.224666 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:07Z","lastTransitionTime":"2026-01-26T16:56:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.327315 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.327401 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.327417 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.327469 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.327485 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:07Z","lastTransitionTime":"2026-01-26T16:56:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.430954 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.431027 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.431038 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.431060 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.431070 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:07Z","lastTransitionTime":"2026-01-26T16:56:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.455110 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-05 23:08:10.692896773 +0000 UTC Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.544337 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.544393 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.544419 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.544440 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.544455 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:07Z","lastTransitionTime":"2026-01-26T16:56:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.647235 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.647270 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.647279 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.647313 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.647325 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:07Z","lastTransitionTime":"2026-01-26T16:56:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.751174 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.751225 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.751237 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.751255 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.751268 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:07Z","lastTransitionTime":"2026-01-26T16:56:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.854934 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.855011 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.855022 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.855041 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.855055 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:07Z","lastTransitionTime":"2026-01-26T16:56:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.958722 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.958773 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.958786 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.958809 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:07 crc kubenswrapper[4865]: I0126 16:56:07.958826 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:07Z","lastTransitionTime":"2026-01-26T16:56:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.062368 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.062874 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.062891 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.062919 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.062936 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:08Z","lastTransitionTime":"2026-01-26T16:56:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.165870 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.165949 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.165964 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.166017 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.166041 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:08Z","lastTransitionTime":"2026-01-26T16:56:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.268836 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.268935 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.268955 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.268979 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.269020 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:08Z","lastTransitionTime":"2026-01-26T16:56:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.357735 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.357837 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.357887 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.357775 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:56:08 crc kubenswrapper[4865]: E0126 16:56:08.358088 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:56:08 crc kubenswrapper[4865]: E0126 16:56:08.358185 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9" Jan 26 16:56:08 crc kubenswrapper[4865]: E0126 16:56:08.358461 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:56:08 crc kubenswrapper[4865]: E0126 16:56:08.363409 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.371094 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.371158 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.371173 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.371191 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.371204 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:08Z","lastTransitionTime":"2026-01-26T16:56:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.455338 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 06:25:29.299454326 +0000 UTC Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.474772 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.474835 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.474849 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.474870 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.474886 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:08Z","lastTransitionTime":"2026-01-26T16:56:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.578141 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.578192 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.578204 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.578223 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.578240 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:08Z","lastTransitionTime":"2026-01-26T16:56:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.680930 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.681010 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.681022 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.681044 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.681058 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:08Z","lastTransitionTime":"2026-01-26T16:56:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.784599 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.784674 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.784695 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.784717 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.784737 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:08Z","lastTransitionTime":"2026-01-26T16:56:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.889354 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.889413 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.889425 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.889450 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.889463 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:08Z","lastTransitionTime":"2026-01-26T16:56:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.992524 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.992574 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.992587 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.992606 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:08 crc kubenswrapper[4865]: I0126 16:56:08.992617 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:08Z","lastTransitionTime":"2026-01-26T16:56:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.095692 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.095745 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.095757 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.095776 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.095792 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:09Z","lastTransitionTime":"2026-01-26T16:56:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.198982 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.199080 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.199093 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.199118 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.199136 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:09Z","lastTransitionTime":"2026-01-26T16:56:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.303203 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.303256 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.303265 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.303283 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.303293 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:09Z","lastTransitionTime":"2026-01-26T16:56:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.406142 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.406192 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.406210 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.406234 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.406249 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:09Z","lastTransitionTime":"2026-01-26T16:56:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.455655 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-29 03:24:14.228493241 +0000 UTC
Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.509845 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.509884 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.509895 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.509915 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.509928 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:09Z","lastTransitionTime":"2026-01-26T16:56:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.542315 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-44x2q_0c0135f6-4074-4aab-9413-a8eb948cd566/ovnkube-controller/3.log"
Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.543089 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-44x2q_0c0135f6-4074-4aab-9413-a8eb948cd566/ovnkube-controller/2.log"
Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.545980 4865 generic.go:334] "Generic (PLEG): container finished" podID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerID="0b1bdaec7b6fb09cece76cbaadc553059d80d03532328d814747fd917b452d7f" exitCode=1
Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.546046 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" event={"ID":"0c0135f6-4074-4aab-9413-a8eb948cd566","Type":"ContainerDied","Data":"0b1bdaec7b6fb09cece76cbaadc553059d80d03532328d814747fd917b452d7f"}
Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.546095 4865 scope.go:117] "RemoveContainer" containerID="ee511e125da491001506ac0da29a1d7261d33f10c886fa265a81698ce5d2d8e4"
Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.547389 4865 scope.go:117] "RemoveContainer" containerID="0b1bdaec7b6fb09cece76cbaadc553059d80d03532328d814747fd917b452d7f"
Jan 26 16:56:09 crc kubenswrapper[4865]: E0126 16:56:09.547670 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-44x2q_openshift-ovn-kubernetes(0c0135f6-4074-4aab-9413-a8eb948cd566)\"" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566"
Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.562395 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83ffee97-9e36-462b-b020-39fcf1c33c00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00669bc8bfcb8d7fa5d244f0819a79880bc778ed6aab7b68787c72b6f1709443\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98604715e78c851f959f0c229d5e36f68203dd7d1af5522d2c76c17ec168f95b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ced4687ba65f43cdb20daccd718c699d6a86a64138439b5f8fbe6bbc37b1ba3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2ec0d2b95278257555fc63611b54fc6c16f5abbb660c6dcd07c256495f67cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d2ec0d2b95278257555fc63611b54fc6c16f5abbb660c6dcd07c256495f67cf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:09Z is after 2025-08-24T17:21:41Z"
Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.578615 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:09Z is after 2025-08-24T17:21:41Z"
Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.592305 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b2cd44e037e42e82e40d3b3a2bc4e00c18804301aa567cf5bf5c7c25ef536be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:09Z is after 2025-08-24T17:21:41Z"
Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.607674 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ddedab5-2528-4881-9251-9ba5334aea61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9a5f5b650bd1b1e2875d8767634d7ac74fd1c90ea76f84e65f5e5078828be62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79930d9ec5ae2071a02e2417508458bd2f1b5693205ae0ad3ee46c5a3a31493b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q8cb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:09Z is after 2025-08-24T17:21:41Z"
Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.617368 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.617405 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.617414 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.617429 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.617438 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:09Z","lastTransitionTime":"2026-01-26T16:56:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.625390 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bz29j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d80fdc9f189133778add9127ee70dbdd4ca23ed09cf047218ed4ce9e4b07ce36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82ddf2354d7df028aad27125fddbfccd87910a5407d7d63ff39865dd9cedcaa7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T16:55:54Z\\\",\\\"message\\\":\\\"2026-01-26T16:55:08+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c36d9952-7cfc-4ebd-954c-df5db2c91d07\\\\n2026-01-26T16:55:08+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c36d9952-7cfc-4ebd-954c-df5db2c91d07 to /host/opt/cni/bin/\\\\n2026-01-26T16:55:09Z [verbose] multus-daemon started\\\\n2026-01-26T16:55:09Z [verbose] Readiness Indicator file check\\\\n2026-01-26T16:55:54Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-79bqr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bz29j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:09Z is after 2025-08-24T17:21:41Z"
Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.639125 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-wx7wp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ebe888-610a-47c4-b256-3ddbf03f83b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:18Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-wx7wp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:09Z is after 2025-08-24T17:21:41Z"
Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.653495 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5610d76387528c6d73e03566f9acf4b744b79f4a39119d35d23b8b2dcb385f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:09Z is after 2025-08-24T17:21:41Z"
Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.663718 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c52621ee-c29b-488b-a3bc-1614bcc37576\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97bf00e18f645318dd33d3ca36e9eda72e760adf20b3b28332628932c8e5b38b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00c85dd79fa2ffbae8e6a95d729ed74719c871cdfb91f69a761ae95cec2055b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c85dd79fa2ffbae8e6a95d729ed74719c871cdfb91f69a761ae95cec2055b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:09Z is after 2025-08-24T17:21:41Z"
Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.676185 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44c06740a3b924e4b54afda160e7e790e2bec67ca8852b703887591802f029af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:09Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.690865 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:09Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.703970 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:09Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.720674 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.720727 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.720740 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.720756 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.721135 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:09Z","lastTransitionTime":"2026-01-26T16:56:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.727278 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c0135f6-4074-4aab-9413-a8eb948cd566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b1bdaec7b6fb09cece76cbaadc553059d80d03532328d814747fd917b452d7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee511e125da491001506ac0da29a1d7261d33f10c886fa265a81698ce5d2d8e4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"message\\\":\\\"l\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0126 16:55:39.437634 6638 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI0126 16:55:39.437644 6638 obj_retry.go:303] Retry object setup: *v1.Pod openshift-dns/node-resolver-p7qpc\\\\nF0126 16:55:39.437650 6638 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:39Z is after 2025-08-24T17:21:41Z]\\\\nI0126 16:55:39.437664 6638 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0126 16:55:39.437585 6638 services_controller.go:451] Built service 
openshift-marketplace/certified-operator\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b1bdaec7b6fb09cece76cbaadc553059d80d03532328d814747fd917b452d7f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T16:56:08Z\\\",\\\"message\\\":\\\" 6909 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 16:56:07.991381 6909 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0126 16:56:07.991404 6909 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0126 16:56:07.991419 6909 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0126 16:56:07.991425 6909 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0126 16:56:07.991450 6909 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0126 16:56:07.991487 6909 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0126 16:56:07.991518 6909 factory.go:656] Stopping watch factory\\\\nI0126 16:56:07.992231 6909 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 16:56:07.992248 6909 handler.go:208] Removed *v1.Node event handler 7\\\\nI0126 16:56:07.992268 6909 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0126 16:56:07.992275 6909 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0126 16:56:07.992330 6909 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0126 16:56:07.992335 6909 handler.go:208] Removed *v1.Node event handler 2\\\\nI0126 16:56:07.992603 6909 handler.go:208] Removed *v1.NetworkPolicy 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:56:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-44x2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:09Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.749982 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2e47e1f3-e364-439b-b53a-1be7c7855c0b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc69fe50939a952f84122d3cad7f10be6af95339fcd4acd4fac04c8a016dbbe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36f7de44402f8a7aad2517680def4ccb193db48ab21336b1b6bc14f00c680744\\\",\\
\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://538a83657298df60ad66384b8292cedbf580a5e2919d078a6137d110ed389707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aeed8d07b9e87fec731b2b36308602dd19790fc9218fe6fc0667127100916835\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcfa6a28052cef728edc32e435c8e49560aca61dcdd9a1aee7fb96762c597434\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62a0ad6430e07209cecff40860c0e36cb297f37d4f0230f89890b637009538b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@
sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62a0ad6430e07209cecff40860c0e36cb297f37d4f0230f89890b637009538b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2a5a56bc9d7995aaedd0e9dac5e9755a5b145cf9e663df946b9fa8139174e4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2a5a56bc9d7995aaedd0e9dac5e9755a5b145cf9e663df946b9fa8139174e4b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ef21dd98ef762ec4d9fa31722e4cc551c00be7c0210f4e9f1415a2d8672c51b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef21dd98ef762ec4d9fa31722e4cc551c00be7c0210f4e9f1415a2d8672c51b8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:09Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.766273 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e58c71a-f1f1-421b-8e21-53ef9b51b937\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c1c8845e13a8d4b226f13897c388c5e07b2e9442c182930b42a88b2ed7b299b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a953b0ed4474ae87ed2f4658deb39951e319c1675872e49ea28b634d3c37b8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1160d459f3a91359793f3431bd62257ad71f9446c935f43b5265443946d3e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:09Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.786457 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p7qpc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"000aa434-a8c8-4051-88f4-c50d48ce851b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4d967fccbef000c343246b0f7d1cbdd98592d6dc499d444472c20588a6e6697\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m7gvt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p7qpc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:09Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.816422 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bccdd94a-94b4-4a16-95dd-c375a34f754f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ccd48f832c010ac33cb2001b2e71aac437d4366744c03a232ff9d8dd4acea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mou
ntPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCou
nt\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fsw2\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:09Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.823487 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.823544 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.823556 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.823575 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.823591 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:09Z","lastTransitionTime":"2026-01-26T16:56:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.841280 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:09Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.855901 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-45gx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f950c5c3-b446-44cf-9de5-dd7ff03b615f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10c92950abd6cddc41b262bd7e06b74fa47a01c012edd0bec0f5260b992ee70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwt2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-45gx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:09Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.872938 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z64kc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ddaae5-b604-4352-b756-3a4ee374b6e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b554efe88a77e7cf03fc70023bc8663bc926af4a8a95d657d5073fcc44c9f17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4131a32bf9f6e8aa00c8250af3d01f8768e1db9c93b7a25b925580f0f337c2c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:17Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z64kc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:09Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.926242 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.926290 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.926303 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.926320 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:09 crc kubenswrapper[4865]: I0126 16:56:09.926332 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:09Z","lastTransitionTime":"2026-01-26T16:56:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.029102 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.029173 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.029197 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.029234 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.029257 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:10Z","lastTransitionTime":"2026-01-26T16:56:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.132393 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.132493 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.132518 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.132548 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.132573 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:10Z","lastTransitionTime":"2026-01-26T16:56:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.211414 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.211464 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.211475 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.211493 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.211508 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:10Z","lastTransitionTime":"2026-01-26T16:56:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:10 crc kubenswrapper[4865]: E0126 16:56:10.235045 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ac1a731-046d-4d8f-8161-6e9e491f5dac\\\",\\\"systemUUID\\\":\\\"35d9e2a2-68c2-48cb-856f-00ba3eb74617\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:10Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.243191 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.243280 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.243301 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.243346 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.243368 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:10Z","lastTransitionTime":"2026-01-26T16:56:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:10 crc kubenswrapper[4865]: E0126 16:56:10.292439 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ac1a731-046d-4d8f-8161-6e9e491f5dac\\\",\\\"systemUUID\\\":\\\"35d9e2a2-68c2-48cb-856f-00ba3eb74617\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:10Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.297319 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.297358 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.297371 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.297390 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.297404 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:10Z","lastTransitionTime":"2026-01-26T16:56:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:10 crc kubenswrapper[4865]: E0126 16:56:10.317951 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ac1a731-046d-4d8f-8161-6e9e491f5dac\\\",\\\"systemUUID\\\":\\\"35d9e2a2-68c2-48cb-856f-00ba3eb74617\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:10Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.343338 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.343399 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.343415 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.343436 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.343449 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:10Z","lastTransitionTime":"2026-01-26T16:56:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.357235 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:56:10 crc kubenswrapper[4865]: E0126 16:56:10.357395 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.357596 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:56:10 crc kubenswrapper[4865]: E0126 16:56:10.357643 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.357757 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:56:10 crc kubenswrapper[4865]: E0126 16:56:10.357797 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.357911 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:56:10 crc kubenswrapper[4865]: E0126 16:56:10.357970 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9" Jan 26 16:56:10 crc kubenswrapper[4865]: E0126 16:56:10.362880 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ac1a731-046d-4d8f-8161-6e9e491f5dac\\\",\\\"systemUUID\\\":\\\"35d9e2a2-68c2-48cb-856f-00ba3eb74617\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:10Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.367264 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.367307 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.367320 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.367337 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.367351 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:10Z","lastTransitionTime":"2026-01-26T16:56:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:10 crc kubenswrapper[4865]: E0126 16:56:10.382876 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:10Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ac1a731-046d-4d8f-8161-6e9e491f5dac\\\",\\\"systemUUID\\\":\\\"35d9e2a2-68c2-48cb-856f-00ba3eb74617\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:10Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:10 crc kubenswrapper[4865]: E0126 16:56:10.383033 4865 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.384437 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.384489 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.384501 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.384514 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.384524 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:10Z","lastTransitionTime":"2026-01-26T16:56:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.455943 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-08 23:32:23.912856501 +0000 UTC Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.487262 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.487368 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.487391 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.487418 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.487437 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:10Z","lastTransitionTime":"2026-01-26T16:56:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.551961 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-44x2q_0c0135f6-4074-4aab-9413-a8eb948cd566/ovnkube-controller/3.log" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.590770 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.590868 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.590898 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.590930 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.590954 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:10Z","lastTransitionTime":"2026-01-26T16:56:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.694631 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.694693 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.694711 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.694735 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.694751 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:10Z","lastTransitionTime":"2026-01-26T16:56:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.798854 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.798928 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.798958 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.799031 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.799060 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:10Z","lastTransitionTime":"2026-01-26T16:56:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.902302 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.902383 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.902397 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.902414 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:10 crc kubenswrapper[4865]: I0126 16:56:10.902427 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:10Z","lastTransitionTime":"2026-01-26T16:56:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.005417 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.005490 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.005502 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.005516 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.005527 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:11Z","lastTransitionTime":"2026-01-26T16:56:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.108161 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.108228 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.108240 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.108260 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.108274 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:11Z","lastTransitionTime":"2026-01-26T16:56:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.210808 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.210874 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.210894 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.210913 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.210924 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:11Z","lastTransitionTime":"2026-01-26T16:56:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.313144 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.313180 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.313190 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.313204 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.313214 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:11Z","lastTransitionTime":"2026-01-26T16:56:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.415351 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.415406 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.415419 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.415435 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.415447 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:11Z","lastTransitionTime":"2026-01-26T16:56:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.456804 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-07 14:28:01.560041826 +0000 UTC Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.517911 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.517967 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.517980 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.518018 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.518033 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:11Z","lastTransitionTime":"2026-01-26T16:56:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.620325 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.620360 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.620368 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.620383 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.620394 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:11Z","lastTransitionTime":"2026-01-26T16:56:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.723919 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.724093 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.724122 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.724156 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.724176 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:11Z","lastTransitionTime":"2026-01-26T16:56:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.827302 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.827346 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.827355 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.827386 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.827400 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:11Z","lastTransitionTime":"2026-01-26T16:56:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.930247 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.930328 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.930375 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.930398 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:11 crc kubenswrapper[4865]: I0126 16:56:11.930412 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:11Z","lastTransitionTime":"2026-01-26T16:56:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.033751 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.033852 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.033877 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.033908 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.033928 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:12Z","lastTransitionTime":"2026-01-26T16:56:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.137236 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.137293 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.137310 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.137330 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.137342 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:12Z","lastTransitionTime":"2026-01-26T16:56:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.239945 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.240032 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.240046 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.240069 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.240080 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:12Z","lastTransitionTime":"2026-01-26T16:56:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.342926 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.342964 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.342973 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.343002 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.343011 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:12Z","lastTransitionTime":"2026-01-26T16:56:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.357294 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.357326 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.357405 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:56:12 crc kubenswrapper[4865]: E0126 16:56:12.357451 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9" Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.357399 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:56:12 crc kubenswrapper[4865]: E0126 16:56:12.357555 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:56:12 crc kubenswrapper[4865]: E0126 16:56:12.357622 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:56:12 crc kubenswrapper[4865]: E0126 16:56:12.357729 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.446160 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.446234 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.446244 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.446279 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.446289 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:12Z","lastTransitionTime":"2026-01-26T16:56:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.457369 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-12 08:12:22.292620218 +0000 UTC Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.549523 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.549596 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.549608 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.549625 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.549636 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:12Z","lastTransitionTime":"2026-01-26T16:56:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.653777 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.653857 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.653874 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.653898 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.653921 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:12Z","lastTransitionTime":"2026-01-26T16:56:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.756905 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.756984 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.757060 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.757096 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.757124 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:12Z","lastTransitionTime":"2026-01-26T16:56:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.860491 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.860552 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.860563 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.860583 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.860595 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:12Z","lastTransitionTime":"2026-01-26T16:56:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.964084 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.964153 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.964169 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.964193 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:12 crc kubenswrapper[4865]: I0126 16:56:12.964208 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:12Z","lastTransitionTime":"2026-01-26T16:56:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.067866 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.067931 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.067947 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.067965 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.067978 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:13Z","lastTransitionTime":"2026-01-26T16:56:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.170800 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.170873 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.170886 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.170908 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.170922 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:13Z","lastTransitionTime":"2026-01-26T16:56:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.273665 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.273716 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.273725 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.273738 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.273749 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:13Z","lastTransitionTime":"2026-01-26T16:56:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.376538 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.376603 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.376618 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.376652 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.376668 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:13Z","lastTransitionTime":"2026-01-26T16:56:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.458048 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 02:21:05.890305855 +0000 UTC Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.479213 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.479272 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.479286 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.479308 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.479326 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:13Z","lastTransitionTime":"2026-01-26T16:56:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.583085 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.583184 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.583202 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.583225 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.583242 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:13Z","lastTransitionTime":"2026-01-26T16:56:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.686354 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.686481 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.686491 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.686504 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.686514 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:13Z","lastTransitionTime":"2026-01-26T16:56:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.789826 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.789875 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.789885 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.789900 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.789911 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:13Z","lastTransitionTime":"2026-01-26T16:56:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.893910 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.893972 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.893983 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.894020 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.894036 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:13Z","lastTransitionTime":"2026-01-26T16:56:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.997482 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.997567 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.997583 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.997609 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:13 crc kubenswrapper[4865]: I0126 16:56:13.997625 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:13Z","lastTransitionTime":"2026-01-26T16:56:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.100982 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.101086 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.101108 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.101140 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.101165 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:14Z","lastTransitionTime":"2026-01-26T16:56:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.204621 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.204692 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.204709 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.204726 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.204741 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:14Z","lastTransitionTime":"2026-01-26T16:56:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.308721 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.308771 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.308781 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.308797 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.308811 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:14Z","lastTransitionTime":"2026-01-26T16:56:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.357467 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.357553 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.357524 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.357491 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:56:14 crc kubenswrapper[4865]: E0126 16:56:14.357741 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:56:14 crc kubenswrapper[4865]: E0126 16:56:14.357861 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:56:14 crc kubenswrapper[4865]: E0126 16:56:14.357953 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:56:14 crc kubenswrapper[4865]: E0126 16:56:14.358243 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9" Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.374933 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83ffee97-9e36-462b-b020-39fcf1c33c00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00669bc8bfcb8d7fa5d244f0819a79880bc778ed6aab7b68787c72b6f1709443\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98604715e78c851f959f0c229d5e36f68203dd7d1af5522d2c76c17ec168f95b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ced4687ba65f43cdb20daccd718c699d6a86a64138439b5f8fbe6bbc37b1ba3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2ec0d2b95278257555fc63611b54fc6c16f5abbb660c6dcd07c256495f67cf\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d2ec0d2b95278257555fc63611b54fc6c16f5abbb660c6dcd07c256495f67cf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:14Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.389010 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:14Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.399393 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b2cd44e037e42e82e40d3b3a2bc4e00c18804301aa567cf5bf5c7c25ef536be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:14Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.409562 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ddedab5-2528-4881-9251-9ba5334aea61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9a5f5b650bd1b1e2875d8767634d7ac74fd1c90ea76f84e65f5e5078828be62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79930d9ec5ae2071a02e2417508458bd2f1b5693205ae0ad3ee46c5a3a31493b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q8cb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:14Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.411134 4865 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.411195 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.411212 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.411232 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.411245 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:14Z","lastTransitionTime":"2026-01-26T16:56:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.421137 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bz29j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d80fdc9f189133778add9127ee70dbdd4ca23ed09cf047218ed4ce9e4b07ce36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82ddf2354d7df028aad27125fddbfccd87910a5407d7d63ff39865dd9cedcaa7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T16:55:54Z\\\",\\\"message\\\":\\\"2026-01-26T16:55:08+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c36d9952-7cfc-4ebd-954c-df5db2c91d07\\\\n2026-01-26T16:55:08+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c36d9952-7cfc-4ebd-954c-df5db2c91d07 to /host/opt/cni/bin/\\\\n2026-01-26T16:55:09Z [verbose] multus-daemon started\\\\n2026-01-26T16:55:09Z [verbose] Readiness Indicator file check\\\\n2026-01-26T16:55:54Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-79bqr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bz29j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:14Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.430322 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-wx7wp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ebe888-610a-47c4-b256-3ddbf03f83b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with 
unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:18Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-wx7wp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:14Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.446818 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5610d76387528c6d73e03566f9acf4b744b79f4a39119d35d23b8b2dcb385f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:14Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.456840 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c52621ee-c29b-488b-a3bc-1614bcc37576\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97bf00e18f645318dd33d3ca36e9eda72e760adf20b3b28332628932c8e5b38b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00c85dd79fa2ffbae8e6a95d729ed74719c871cdfb91f69a761ae95cec2055b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c85dd79fa2ffbae8e6a95d729ed74719c871cdfb91f69a761ae95cec2055b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:14Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.459003 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-30 10:05:08.143722303 +0000 UTC Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.472366 4865 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44c06740a3b924e4b54afda160e7e790e2bec67ca8852b703887591802f029af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:14Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.485357 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:14Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.497297 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:14Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.513581 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.513644 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.513655 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.513674 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.513686 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:14Z","lastTransitionTime":"2026-01-26T16:56:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.521609 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c0135f6-4074-4aab-9413-a8eb948cd566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b1bdaec7b6fb09cece76cbaadc553059d80d03532328d814747fd917b452d7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee511e125da491001506ac0da29a1d7261d33f10c886fa265a81698ce5d2d8e4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"message\\\":\\\"l\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0126 16:55:39.437634 6638 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI0126 16:55:39.437644 6638 obj_retry.go:303] Retry object setup: *v1.Pod openshift-dns/node-resolver-p7qpc\\\\nF0126 16:55:39.437650 6638 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:39Z is after 2025-08-24T17:21:41Z]\\\\nI0126 16:55:39.437664 6638 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0126 16:55:39.437585 6638 services_controller.go:451] Built service 
openshift-marketplace/certified-operator\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b1bdaec7b6fb09cece76cbaadc553059d80d03532328d814747fd917b452d7f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T16:56:08Z\\\",\\\"message\\\":\\\" 6909 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 16:56:07.991381 6909 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0126 16:56:07.991404 6909 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0126 16:56:07.991419 6909 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0126 16:56:07.991425 6909 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0126 16:56:07.991450 6909 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0126 16:56:07.991487 6909 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0126 16:56:07.991518 6909 factory.go:656] Stopping watch factory\\\\nI0126 16:56:07.992231 6909 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 16:56:07.992248 6909 handler.go:208] Removed *v1.Node event handler 7\\\\nI0126 16:56:07.992268 6909 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0126 16:56:07.992275 6909 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0126 16:56:07.992330 6909 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0126 16:56:07.992335 6909 handler.go:208] Removed *v1.Node event handler 2\\\\nI0126 16:56:07.992603 6909 handler.go:208] Removed *v1.NetworkPolicy 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:56:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-44x2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:14Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.540941 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2e47e1f3-e364-439b-b53a-1be7c7855c0b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc69fe50939a952f84122d3cad7f10be6af95339fcd4acd4fac04c8a016dbbe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36f7de44402f8a7aad2517680def4ccb193db48ab21336b1b6bc14f00c680744\\\",\\
\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://538a83657298df60ad66384b8292cedbf580a5e2919d078a6137d110ed389707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aeed8d07b9e87fec731b2b36308602dd19790fc9218fe6fc0667127100916835\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcfa6a28052cef728edc32e435c8e49560aca61dcdd9a1aee7fb96762c597434\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62a0ad6430e07209cecff40860c0e36cb297f37d4f0230f89890b637009538b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@
sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62a0ad6430e07209cecff40860c0e36cb297f37d4f0230f89890b637009538b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2a5a56bc9d7995aaedd0e9dac5e9755a5b145cf9e663df946b9fa8139174e4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2a5a56bc9d7995aaedd0e9dac5e9755a5b145cf9e663df946b9fa8139174e4b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ef21dd98ef762ec4d9fa31722e4cc551c00be7c0210f4e9f1415a2d8672c51b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef21dd98ef762ec4d9fa31722e4cc551c00be7c0210f4e9f1415a2d8672c51b8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:14Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.554132 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e58c71a-f1f1-421b-8e21-53ef9b51b937\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c1c8845e13a8d4b226f13897c388c5e07b2e9442c182930b42a88b2ed7b299b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a953b0ed4474ae87ed2f4658deb39951e319c1675872e49ea28b634d3c37b8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1160d459f3a91359793f3431bd62257ad71f9446c935f43b5265443946d3e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:14Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.567184 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p7qpc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"000aa434-a8c8-4051-88f4-c50d48ce851b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4d967fccbef000c343246b0f7d1cbdd98592d6dc499d444472c20588a6e6697\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m7gvt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p7qpc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:14Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.582586 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bccdd94a-94b4-4a16-95dd-c375a34f754f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ccd48f832c010ac33cb2001b2e71aac437d4366744c03a232ff9d8dd4acea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mou
ntPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCou
nt\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fsw2\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:14Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.593893 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:14Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.602535 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-45gx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f950c5c3-b446-44cf-9de5-dd7ff03b615f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10c92950abd6cddc41b262bd7e06b74fa47a01c012edd0bec0f5260b992ee70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwt2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-45gx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:14Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.612335 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z64kc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ddaae5-b604-4352-b756-3a4ee374b6e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b554efe88a77e7cf03fc70023bc8663bc926af4a8a95d657d5073fcc44c9f17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4131a32bf9f6e8aa00c8250af3d01f8768e1db9c93b7a25b925580f0f337c2c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z64kc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:14Z is after 2025-08-24T17:21:41Z" Jan 26 
Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.615933 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.615966 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.615977 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.616007 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:56:14 crc kubenswrapper[4865]: I0126 16:56:14.616020 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:14Z","lastTransitionTime":"2026-01-26T16:56:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[The five-record block above (NodeHasSufficientMemory / NodeHasNoDiskPressure / NodeHasSufficientPID / NodeNotReady / "Node became not ready") repeats with identical content, differing only in timestamps, at roughly 100 ms intervals: 16:56:14.719, 16:56:14.822, 16:56:14.925, 16:56:15.028, 16:56:15.132, 16:56:15.235, 16:56:15.338, 16:56:15.441, 16:56:15.544, 16:56:15.647, 16:56:15.750, 16:56:15.852, 16:56:15.956, 16:56:16.059, 16:56:16.162, and 16:56:16.266. One certificate_manager record was interleaved at 16:56:15.459:]
Jan 26 16:56:15 crc kubenswrapper[4865]: I0126 16:56:15.459339 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-01 13:23:04.968572151 +0000 UTC
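[Editor's note: every "Node became not ready" record in the run above comes from the same gate: the kubelet reports the runtime network unready until a CNI configuration file appears in /etc/kubernetes/cni/net.d/. A standalone Go sketch approximating that directory check (not the kubelet's actual code):]

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// cniConfigPresent approximates the check behind the
// "no CNI configuration file in /etc/kubernetes/cni/net.d/" message:
// the network is considered unready until at least one
// .conf/.conflist/.json file shows up in the CNI configuration directory.
func cniConfigPresent(dir string) bool {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false
	}
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true
		}
	}
	return false
}

func main() {
	dir := "/etc/kubernetes/cni/net.d"
	if cniConfigPresent(dir) {
		fmt.Println("NetworkReady=true")
	} else {
		fmt.Printf("container runtime network not ready: no CNI configuration file in %s/\n", dir)
	}
}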
Jan 26 16:56:16 crc kubenswrapper[4865]: I0126 16:56:16.357418 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp"
Jan 26 16:56:16 crc kubenswrapper[4865]: I0126 16:56:16.357471 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 26 16:56:16 crc kubenswrapper[4865]: I0126 16:56:16.357418 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 26 16:56:16 crc kubenswrapper[4865]: I0126 16:56:16.357540 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 26 16:56:16 crc kubenswrapper[4865]: E0126 16:56:16.357710 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9"
Jan 26 16:56:16 crc kubenswrapper[4865]: E0126 16:56:16.357818 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 26 16:56:16 crc kubenswrapper[4865]: E0126 16:56:16.358617 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 26 16:56:16 crc kubenswrapper[4865]: E0126 16:56:16.358832 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
[The five-record node-status block repeats at 16:56:16.368.]
Jan 26 16:56:16 crc kubenswrapper[4865]: I0126 16:56:16.460458 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-27 05:50:39.456630791 +0000 UTC
[The five-record node-status block repeats every ~100 ms from 16:56:16.471 through 16:56:17.401: 16:56:16.471, 16:56:16.574, 16:56:16.677, 16:56:16.780, 16:56:16.883, 16:56:16.987, 16:56:17.090, 16:56:17.194, 16:56:17.298, 16:56:17.401.]
Jan 26 16:56:17 crc kubenswrapper[4865]: I0126 16:56:17.460950 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-27 12:24:50.369107085 +0000 UTC
[The five-record node-status block repeats every ~100 ms from 16:56:17.505 through 16:56:18.229: 16:56:17.505, 16:56:17.607, 16:56:17.710, 16:56:17.813, 16:56:17.917, 16:56:18.020, 16:56:18.125, 16:56:18.229.]
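[Editor's note: the certificate_manager records so far (16:56:15.459, 16:56:16.460, 16:56:17.460) all report the same expiration, 2026-02-24 05:53:03 UTC, yet a different rotation deadline each time. That is expected: client-go's certificate manager re-draws a jittered deadline at roughly 70-90% of the certificate's lifetime on each evaluation, so a fleet of nodes does not rotate all at once. A Go sketch of that jittered draw follows; the NotBefore date is a hypothetical assumption, since only the expiration appears in the log.]

package main

import (
	"fmt"
	"math/rand"
	"time"
)

// nextRotationDeadline mimics the jitter behind the changing
// "rotation deadline" values above: each call draws a fresh random
// point at roughly 70-90% of the certificate's lifetime (a sketch of
// client-go's behaviour, not its actual code).
func nextRotationDeadline(notBefore, notAfter time.Time) time.Time {
	total := notAfter.Sub(notBefore)
	jittered := time.Duration(float64(total) * (0.7 + 0.2*rand.Float64()))
	return notBefore.Add(jittered)
}

func main() {
	// Expiration taken from the log; the issue date is a hypothetical assumption.
	notAfter, err := time.Parse("2006-01-02 15:04:05", "2026-02-24 05:53:03")
	if err != nil {
		panic(err)
	}
	notBefore := notAfter.AddDate(-1, 0, 0)
	// Each evaluation yields a different deadline, as in the records above.
	for i := 0; i < 3; i++ {
		fmt.Println("rotation deadline:", nextRotationDeadline(notBefore, notAfter).UTC().Format(time.RFC3339))
	}
}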
Has your network provider started?"} Jan 26 16:56:18 crc kubenswrapper[4865]: I0126 16:56:18.332476 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:18 crc kubenswrapper[4865]: I0126 16:56:18.332526 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:18 crc kubenswrapper[4865]: I0126 16:56:18.332540 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:18 crc kubenswrapper[4865]: I0126 16:56:18.332559 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:18 crc kubenswrapper[4865]: I0126 16:56:18.332574 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:18Z","lastTransitionTime":"2026-01-26T16:56:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:18 crc kubenswrapper[4865]: I0126 16:56:18.357874 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:56:18 crc kubenswrapper[4865]: I0126 16:56:18.358041 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:56:18 crc kubenswrapper[4865]: I0126 16:56:18.358138 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:56:18 crc kubenswrapper[4865]: E0126 16:56:18.358049 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:56:18 crc kubenswrapper[4865]: E0126 16:56:18.358244 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:56:18 crc kubenswrapper[4865]: E0126 16:56:18.358299 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:56:18 crc kubenswrapper[4865]: I0126 16:56:18.358377 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:56:18 crc kubenswrapper[4865]: E0126 16:56:18.358491 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9" Jan 26 16:56:18 crc kubenswrapper[4865]: I0126 16:56:18.435176 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:18 crc kubenswrapper[4865]: I0126 16:56:18.435219 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:18 crc kubenswrapper[4865]: I0126 16:56:18.435234 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:18 crc kubenswrapper[4865]: I0126 16:56:18.435250 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:18 crc kubenswrapper[4865]: I0126 16:56:18.435264 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:18Z","lastTransitionTime":"2026-01-26T16:56:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:18 crc kubenswrapper[4865]: I0126 16:56:18.461376 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-09 21:41:16.46147426 +0000 UTC Jan 26 16:56:18 crc kubenswrapper[4865]: I0126 16:56:18.537507 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:18 crc kubenswrapper[4865]: I0126 16:56:18.537555 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:18 crc kubenswrapper[4865]: I0126 16:56:18.537567 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:18 crc kubenswrapper[4865]: I0126 16:56:18.537584 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:18 crc kubenswrapper[4865]: I0126 16:56:18.537596 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:18Z","lastTransitionTime":"2026-01-26T16:56:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:18 crc kubenswrapper[4865]: I0126 16:56:18.640688 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:18 crc kubenswrapper[4865]: I0126 16:56:18.640762 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:18 crc kubenswrapper[4865]: I0126 16:56:18.640790 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:18 crc kubenswrapper[4865]: I0126 16:56:18.640821 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:18 crc kubenswrapper[4865]: I0126 16:56:18.640853 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:18Z","lastTransitionTime":"2026-01-26T16:56:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:18 crc kubenswrapper[4865]: I0126 16:56:18.743412 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:18 crc kubenswrapper[4865]: I0126 16:56:18.743475 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:18 crc kubenswrapper[4865]: I0126 16:56:18.743488 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:18 crc kubenswrapper[4865]: I0126 16:56:18.743515 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:18 crc kubenswrapper[4865]: I0126 16:56:18.743527 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:18Z","lastTransitionTime":"2026-01-26T16:56:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:18 crc kubenswrapper[4865]: I0126 16:56:18.846335 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:18 crc kubenswrapper[4865]: I0126 16:56:18.846377 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:18 crc kubenswrapper[4865]: I0126 16:56:18.846385 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:18 crc kubenswrapper[4865]: I0126 16:56:18.846400 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:18 crc kubenswrapper[4865]: I0126 16:56:18.846412 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:18Z","lastTransitionTime":"2026-01-26T16:56:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 26 16:56:18 crc kubenswrapper[4865]: I0126 16:56:18.948498 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:56:18 crc kubenswrapper[4865]: I0126 16:56:18.948535 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:56:18 crc kubenswrapper[4865]: I0126 16:56:18.948544 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:56:18 crc kubenswrapper[4865]: I0126 16:56:18.948556 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:56:18 crc kubenswrapper[4865]: I0126 16:56:18.948566 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:18Z","lastTransitionTime":"2026-01-26T16:56:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.051925 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.051977 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.052026 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.052050 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.052067 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:19Z","lastTransitionTime":"2026-01-26T16:56:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.160522 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.160607 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.160627 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.160656 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.160683 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:19Z","lastTransitionTime":"2026-01-26T16:56:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.264925 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.264966 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.264977 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.265008 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.265020 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:19Z","lastTransitionTime":"2026-01-26T16:56:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.368936 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.369040 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.369061 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.369129 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.369187 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:19Z","lastTransitionTime":"2026-01-26T16:56:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.462428 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-28 03:48:27.895865979 +0000 UTC
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.473244 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.473287 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.473304 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.473328 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.473346 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:19Z","lastTransitionTime":"2026-01-26T16:56:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.577059 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.577424 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.577557 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.577685 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.578317 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:19Z","lastTransitionTime":"2026-01-26T16:56:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
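
The two certificate_manager.go:356 lines in this capture (16:56:19.462428 above and 16:56:20.463153 further down) report the same expiration, 2026-02-24 05:53:03 UTC, but different rotation deadlines. That is expected: the deadline is re-drawn with random jitter on each pass of the rotation loop. A minimal Go sketch of that behaviour follows; the 70-90% window is an assumption about the upstream client-go certificate manager, not something stated in this log, and the issue time below is hypothetical.

// rotation.go - hedged sketch of a jittered certificate rotation deadline.
package main

import (
	"fmt"
	"math/rand"
	"time"
)

// rotationDeadline picks a random point in roughly the 70-90% span of the
// certificate's lifetime (assumed to mirror client-go's certificate manager).
func rotationDeadline(notBefore, notAfter time.Time) time.Time {
	total := notAfter.Sub(notBefore)
	jittered := time.Duration(float64(total) * (0.7 + 0.2*rand.Float64()))
	return notBefore.Add(jittered)
}

func main() {
	notBefore := time.Date(2025, 5, 24, 5, 53, 3, 0, time.UTC) // hypothetical issue time
	notAfter := time.Date(2026, 2, 24, 5, 53, 3, 0, time.UTC)  // expiry from the log above
	// Two draws give two different deadlines for the same certificate, as in the log.
	for i := 0; i < 2; i++ {
		fmt.Println("rotation deadline:", rotationDeadline(notBefore, notAfter).UTC())
	}
}
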
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.681720 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.681764 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.681775 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.681791 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.681803 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:19Z","lastTransitionTime":"2026-01-26T16:56:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.784980 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.785115 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.785147 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.785182 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.785206 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:19Z","lastTransitionTime":"2026-01-26T16:56:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.888698 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.888770 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.888786 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.888806 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.888818 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:19Z","lastTransitionTime":"2026-01-26T16:56:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.992927 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.992984 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.993010 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.993027 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:56:19 crc kubenswrapper[4865]: I0126 16:56:19.993037 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:19Z","lastTransitionTime":"2026-01-26T16:56:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.095864 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.095938 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.095952 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.095969 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.095980 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:20Z","lastTransitionTime":"2026-01-26T16:56:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.199051 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.199127 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.199151 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.199177 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.199194 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:20Z","lastTransitionTime":"2026-01-26T16:56:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.301928 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.301983 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.302012 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.302031 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.302046 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:20Z","lastTransitionTime":"2026-01-26T16:56:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.356848 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.356926 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.356932 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 26 16:56:20 crc kubenswrapper[4865]: E0126 16:56:20.357155 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.357202 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp"
Jan 26 16:56:20 crc kubenswrapper[4865]: E0126 16:56:20.357315 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 26 16:56:20 crc kubenswrapper[4865]: E0126 16:56:20.357832 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
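
Every "Error syncing pod, skipping" above carries the same root cause: the kubelet's runtime network check reports NetworkReady=false because /etc/kubernetes/cni/net.d/ contains no CNI configuration file, so no pod sandbox can be wired up. Below is a standalone Go sketch of an equivalent probe; this is not the kubelet's actual code, and the accepted extensions (.conf, .conflist, .json) mirror libcni's defaults as an assumption.

// cnicheck.go - hedged sketch of the "no CNI configuration file" readiness test.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// hasCNIConfig reports whether dir holds at least one plausible CNI config file.
func hasCNIConfig(dir string) (bool, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ok, err := hasCNIConfig("/etc/kubernetes/cni/net.d")
	if err != nil {
		fmt.Println("cannot read CNI config dir:", err)
		return
	}
	if !ok {
		fmt.Println("NetworkReady=false: no CNI configuration file found. Has your network provider started?")
	}
}
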
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:56:20 crc kubenswrapper[4865]: E0126 16:56:20.357914 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9" Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.405597 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.405684 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.405696 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.405719 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.405735 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:20Z","lastTransitionTime":"2026-01-26T16:56:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.463153 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-08 20:55:06.001655869 +0000 UTC Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.508967 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.509078 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.509114 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.509144 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.509164 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:20Z","lastTransitionTime":"2026-01-26T16:56:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.591185 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.591235 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.591273 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.591293 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.591306 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:20Z","lastTransitionTime":"2026-01-26T16:56:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:20 crc kubenswrapper[4865]: E0126 16:56:20.608842 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ac1a731-046d-4d8f-8161-6e9e491f5dac\\\",\\\"systemUUID\\\":\\\"35d9e2a2-68c2-48cb-856f-00ba3eb74617\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:20Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.613482 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.613562 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.613586 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.613621 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.613644 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:20Z","lastTransitionTime":"2026-01-26T16:56:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:20 crc kubenswrapper[4865]: E0126 16:56:20.629848 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ac1a731-046d-4d8f-8161-6e9e491f5dac\\\",\\\"systemUUID\\\":\\\"35d9e2a2-68c2-48cb-856f-00ba3eb74617\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:20Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.634635 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.634725 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.634748 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.634779 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.634802 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:20Z","lastTransitionTime":"2026-01-26T16:56:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:20 crc kubenswrapper[4865]: E0126 16:56:20.652700 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ac1a731-046d-4d8f-8161-6e9e491f5dac\\\",\\\"systemUUID\\\":\\\"35d9e2a2-68c2-48cb-856f-00ba3eb74617\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:20Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.657327 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.657365 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.657380 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.657397 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.657411 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:20Z","lastTransitionTime":"2026-01-26T16:56:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:20 crc kubenswrapper[4865]: E0126 16:56:20.670790 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ac1a731-046d-4d8f-8161-6e9e491f5dac\\\",\\\"systemUUID\\\":\\\"35d9e2a2-68c2-48cb-856f-00ba3eb74617\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:20Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.675762 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.675797 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.675806 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.675823 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.675835 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:20Z","lastTransitionTime":"2026-01-26T16:56:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:20 crc kubenswrapper[4865]: E0126 16:56:20.692200 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:56:20Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T16:56:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4ac1a731-046d-4d8f-8161-6e9e491f5dac\\\",\\\"systemUUID\\\":\\\"35d9e2a2-68c2-48cb-856f-00ba3eb74617\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:20Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:20 crc kubenswrapper[4865]: E0126 16:56:20.692323 4865 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.694869 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.694902 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.694913 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.694930 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.694944 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:20Z","lastTransitionTime":"2026-01-26T16:56:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.798455 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.798501 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.798509 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.798525 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.798536 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:20Z","lastTransitionTime":"2026-01-26T16:56:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.901669 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.901722 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.901736 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.901768 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:20 crc kubenswrapper[4865]: I0126 16:56:20.901780 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:20Z","lastTransitionTime":"2026-01-26T16:56:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.005016 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.005077 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.005088 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.005109 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.005121 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:21Z","lastTransitionTime":"2026-01-26T16:56:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.108315 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.108376 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.108389 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.108409 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.108426 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:21Z","lastTransitionTime":"2026-01-26T16:56:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.211444 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.211492 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.211505 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.211522 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.211534 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:21Z","lastTransitionTime":"2026-01-26T16:56:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.314371 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.314434 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.314452 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.314473 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.314491 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:21Z","lastTransitionTime":"2026-01-26T16:56:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.417535 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.417618 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.417628 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.417643 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.417654 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:21Z","lastTransitionTime":"2026-01-26T16:56:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.464032 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-08 20:01:03.089366305 +0000 UTC Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.521096 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.521138 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.521147 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.521159 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.521168 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:21Z","lastTransitionTime":"2026-01-26T16:56:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.623534 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.623575 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.623590 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.623608 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.623619 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:21Z","lastTransitionTime":"2026-01-26T16:56:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.725532 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.725567 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.725576 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.725589 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.725600 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:21Z","lastTransitionTime":"2026-01-26T16:56:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.827764 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.827808 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.827819 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.827833 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.827843 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:21Z","lastTransitionTime":"2026-01-26T16:56:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.930416 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.930461 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.930469 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.930483 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:21 crc kubenswrapper[4865]: I0126 16:56:21.930494 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:21Z","lastTransitionTime":"2026-01-26T16:56:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.033622 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.033689 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.033700 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.033717 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.033728 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:22Z","lastTransitionTime":"2026-01-26T16:56:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.139325 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.139375 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.139387 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.139404 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.139417 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:22Z","lastTransitionTime":"2026-01-26T16:56:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.243176 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.243233 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.243245 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.243265 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.243277 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:22Z","lastTransitionTime":"2026-01-26T16:56:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.346795 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.346864 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.346889 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.346921 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.346946 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:22Z","lastTransitionTime":"2026-01-26T16:56:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.357465 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.357497 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.357645 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:56:22 crc kubenswrapper[4865]: E0126 16:56:22.357695 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9" Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.357712 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:56:22 crc kubenswrapper[4865]: E0126 16:56:22.357811 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:56:22 crc kubenswrapper[4865]: E0126 16:56:22.357899 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:56:22 crc kubenswrapper[4865]: E0126 16:56:22.358138 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.449878 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.449925 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.449940 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.449959 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.449972 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:22Z","lastTransitionTime":"2026-01-26T16:56:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.464523 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-05 00:13:18.79841421 +0000 UTC Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.552266 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.552315 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.552333 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.552355 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.552368 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:22Z","lastTransitionTime":"2026-01-26T16:56:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.655259 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.655715 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.655875 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.656054 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.656248 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:22Z","lastTransitionTime":"2026-01-26T16:56:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.759129 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.759184 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.759201 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.759226 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.759245 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:22Z","lastTransitionTime":"2026-01-26T16:56:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.862350 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.862410 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.862429 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.862453 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.862472 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:22Z","lastTransitionTime":"2026-01-26T16:56:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.959047 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/27ebe888-610a-47c4-b256-3ddbf03f83b9-metrics-certs\") pod \"network-metrics-daemon-wx7wp\" (UID: \"27ebe888-610a-47c4-b256-3ddbf03f83b9\") " pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:56:22 crc kubenswrapper[4865]: E0126 16:56:22.959300 4865 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 26 16:56:22 crc kubenswrapper[4865]: E0126 16:56:22.959697 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/27ebe888-610a-47c4-b256-3ddbf03f83b9-metrics-certs podName:27ebe888-610a-47c4-b256-3ddbf03f83b9 nodeName:}" failed. No retries permitted until 2026-01-26 16:57:26.959669476 +0000 UTC m=+174.543555063 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/27ebe888-610a-47c4-b256-3ddbf03f83b9-metrics-certs") pod "network-metrics-daemon-wx7wp" (UID: "27ebe888-610a-47c4-b256-3ddbf03f83b9") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.966478 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.966539 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.966557 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.966581 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:22 crc kubenswrapper[4865]: I0126 16:56:22.966599 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:22Z","lastTransitionTime":"2026-01-26T16:56:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.069165 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.069215 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.069226 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.069243 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.069254 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:23Z","lastTransitionTime":"2026-01-26T16:56:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.172677 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.172711 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.172720 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.172734 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.172742 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:23Z","lastTransitionTime":"2026-01-26T16:56:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.275952 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.276018 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.276036 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.276068 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.276086 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:23Z","lastTransitionTime":"2026-01-26T16:56:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.378902 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.378948 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.378957 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.378971 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.378981 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:23Z","lastTransitionTime":"2026-01-26T16:56:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.464821 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-17 21:32:08.620935836 +0000 UTC Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.481165 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.481550 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.481804 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.482185 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.482396 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:23Z","lastTransitionTime":"2026-01-26T16:56:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.584861 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.584907 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.584920 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.584937 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.584953 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:23Z","lastTransitionTime":"2026-01-26T16:56:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.687348 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.687403 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.687427 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.687446 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.687466 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:23Z","lastTransitionTime":"2026-01-26T16:56:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.790788 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.790850 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.790860 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.790881 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.790892 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:23Z","lastTransitionTime":"2026-01-26T16:56:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.894672 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.894729 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.894740 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.894758 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.894770 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:23Z","lastTransitionTime":"2026-01-26T16:56:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.997928 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.997986 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.998050 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.998072 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:23 crc kubenswrapper[4865]: I0126 16:56:23.998088 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:23Z","lastTransitionTime":"2026-01-26T16:56:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.101708 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.101767 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.101778 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.101802 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.101819 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:24Z","lastTransitionTime":"2026-01-26T16:56:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.206150 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.206230 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.206247 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.206273 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.206299 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:24Z","lastTransitionTime":"2026-01-26T16:56:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.309287 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.309350 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.309365 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.309389 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.309410 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:24Z","lastTransitionTime":"2026-01-26T16:56:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.356851 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.356921 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:56:24 crc kubenswrapper[4865]: E0126 16:56:24.356974 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.356853 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:56:24 crc kubenswrapper[4865]: E0126 16:56:24.357071 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:56:24 crc kubenswrapper[4865]: E0126 16:56:24.357256 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.356875 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:56:24 crc kubenswrapper[4865]: E0126 16:56:24.357796 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.357811 4865 scope.go:117] "RemoveContainer" containerID="0b1bdaec7b6fb09cece76cbaadc553059d80d03532328d814747fd917b452d7f" Jan 26 16:56:24 crc kubenswrapper[4865]: E0126 16:56:24.358062 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-44x2q_openshift-ovn-kubernetes(0c0135f6-4074-4aab-9413-a8eb948cd566)\"" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.370498 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.381940 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-45gx2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f950c5c3-b446-44cf-9de5-dd7ff03b615f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10c92950abd6cddc41b262bd7e06b74fa47a01c012edd0bec0f5260b992ee70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwt2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126
.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-45gx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.393717 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z64kc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ddaae5-b604-4352-b756-3a4ee374b6e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b554efe88a77e7cf03fc70023bc8663bc926af4a8a95d657d5073fcc44c9f17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4131a32bf9f6e8aa00c8250af3d01f8768e1db9c93b7a25b925580f0f337c2c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/va
r/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z64kc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.403523 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ddedab5-2528-4881-9251-9ba5334aea61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9a5f5b650bd1b1e2875d8767634d7ac74fd1c90ea76f84e65f5e5078828be62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79930d9ec5ae2071a02e2417508458bd2f1b5693205ae0ad3ee46c5a3a31493b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\
\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q8cb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.412226 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.412262 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.412276 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.412293 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.412308 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:24Z","lastTransitionTime":"2026-01-26T16:56:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.414619 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bz29j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d80fdc9f189133778add9127ee70dbdd4ca23ed09cf047218ed4ce9e4b07ce36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82ddf2354d7df028aad27125fddbfccd87910a5407d7d63ff39865dd9cedcaa7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T16:55:54Z\\\",\\\"message\\\":\\\"2026-01-26T16:55:08+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c36d9952-7cfc-4ebd-954c-df5db2c91d07\\\\n2026-01-26T16:55:08+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c36d9952-7cfc-4ebd-954c-df5db2c91d07 to /host/opt/cni/bin/\\\\n2026-01-26T16:55:09Z [verbose] multus-daemon started\\\\n2026-01-26T16:55:09Z [verbose] Readiness Indicator file check\\\\n2026-01-26T16:55:54Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-79bqr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bz29j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.424538 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-wx7wp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ebe888-610a-47c4-b256-3ddbf03f83b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with 
unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:18Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-wx7wp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.438520 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"83ffee97-9e36-462b-b020-39fcf1c33c00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00669bc8bfcb8d7fa5d244f0819a79880bc778ed6aab7b68787c72b6f1709443\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98604715e78c851f959f0c229d5e36f68203dd7d1af5522d2c76c17ec168f95b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ced4687ba65f43cdb20daccd718c699d6a86a64138439b5f8fbe6bbc37b1ba3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2ec0d2b95278257555fc63611b54fc6c16f5abbb660c6dcd07c256495f67cf\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d2ec0d2b95278257555fc63611b54fc6c16f5abbb660c6dcd07c256495f67cf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.449313 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.460083 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b2cd44e037e42e82e40d3b3a2bc4e00c18804301aa567cf5bf5c7c25ef536be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.465209 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 14:56:32.787968505 +0000 UTC Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.471056 4865 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.485317 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.503509 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c0135f6-4074-4aab-9413-a8eb948cd566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b1bdaec7b6fb09cece76cbaadc553059d80d035
32328d814747fd917b452d7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee511e125da491001506ac0da29a1d7261d33f10c886fa265a81698ce5d2d8e4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T16:55:39Z\\\",\\\"message\\\":\\\"l\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0126 16:55:39.437634 6638 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI0126 16:55:39.437644 6638 obj_retry.go:303] Retry object setup: *v1.Pod openshift-dns/node-resolver-p7qpc\\\\nF0126 16:55:39.437650 6638 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:55:39Z is after 2025-08-24T17:21:41Z]\\\\nI0126 16:55:39.437664 6638 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0126 16:55:39.437585 6638 services_controller.go:451] Built service openshift-marketplace/certified-operator\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:38Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b1bdaec7b6fb09cece76cbaadc553059d80d03532328d814747fd917b452d7f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T16:56:08Z\\\",\\\"message\\\":\\\" 6909 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 16:56:07.991381 6909 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0126 16:56:07.991404 6909 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0126 16:56:07.991419 6909 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0126 16:56:07.991425 6909 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0126 16:56:07.991450 6909 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0126 16:56:07.991487 6909 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0126 16:56:07.991518 6909 factory.go:656] Stopping watch factory\\\\nI0126 16:56:07.992231 6909 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 16:56:07.992248 6909 handler.go:208] Removed *v1.Node event handler 7\\\\nI0126 16:56:07.992268 6909 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0126 16:56:07.992275 6909 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0126 16:56:07.992330 6909 
handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0126 16:56:07.992335 6909 handler.go:208] Removed *v1.Node event handler 2\\\\nI0126 16:56:07.992603 6909 handler.go:208] Removed *v1.NetworkPolicy ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:56:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerS
tatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-44x2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.515099 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.515135 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.515147 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.515162 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.515174 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:24Z","lastTransitionTime":"2026-01-26T16:56:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.516820 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5610d76387528c6d73e03566f9acf4b744b79f4a39119d35d23b8b2dcb385f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.526091 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c52621ee-c29b-488b-a3bc-1614bcc37576\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97bf00e18f645318dd33d3ca36e9eda72e760adf20b3b28332628932c8e5b38b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00c85dd79fa2ffbae8e6a95d729ed74719c871cdfb91f69a761ae95cec2055b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c85dd79fa2ffbae8e6a95d729ed74719c871cdfb91f69a761ae95cec2055b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.538645 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44c06740a3b924e4b54afda160e7e790e2bec67ca8852b703887591802f029af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.556883 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bccdd94a-94b4-4a16-95dd-c375a34f754f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ccd48f832c010ac33cb2001b2e71aac437d4366744c03a232ff9d8dd4acea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fsw2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.578822 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2e47e1f3-e364-439b-b53a-1be7c7855c0b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc69fe50939a952f84122d3cad7f10be6af95339fcd4acd4fac04c8a016dbbe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36f7de44402f8a7aad2517680def4ccb193db48ab21336b1b6bc14f00c680744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://538a83657298df60ad66384b8292cedbf580a5e2919d078a6137d110ed389707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aeed8d07b9e87fec731b2b36308602dd19790fc
9218fe6fc0667127100916835\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcfa6a28052cef728edc32e435c8e49560aca61dcdd9a1aee7fb96762c597434\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62a0ad6430e07209cecff40860c0e36cb297f37d4f0230f89890b637009538b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62a0ad6430e07209cecff40860c0e36cb297f37d4f0230f89890b637009538b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2a5a56bc9d7995aaedd0e9dac5e9755a5b145cf9e663df946b9fa8139174e4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2a5a56bc9d7995aaedd0e9dac5e9755a5b145cf9e663df946b9fa8139174e4b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ef21dd98ef762ec4d9fa31722e4cc551c00be7c0210f4e9f1415a2d8672c51b8\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef21dd98ef762ec4d9fa31722e4cc551c00be7c0210f4e9f1415a2d8672c51b8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.596779 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e58c71a-f1f1-421b-8e21-53ef9b51b937\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c1c8845e13a8d4b226f13897c388c5e07b2e9442c182930b42a88b2ed7b299b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90\\\",\\\"image\\\":\\\
"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a953b0ed4474ae87ed2f4658deb39951e319c1675872e49ea28b634d3c37b8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1160d459f3a91359793f3431bd62257ad71f9446c935f43b5265443946d3e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.608289 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p7qpc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"000aa434-a8c8-4051-88f4-c50d48ce851b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4d967fccbef000c343246b0f7d1cbdd98592d6dc499d444472c20588a6e6697\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m7gvt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p7qpc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.617679 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.617725 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.617736 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.617753 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.617766 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:24Z","lastTransitionTime":"2026-01-26T16:56:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.628846 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2e47e1f3-e364-439b-b53a-1be7c7855c0b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fc69fe50939a952f84122d3cad7f10be6af95339fcd4acd4fac04c8a016dbbe6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36f7de44402f8a7aad2517680def4ccb193db48ab21336b1b6bc14f00c680744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://538a83657298df60ad66384b8292cedbf580a5e2919d078a6137d110ed389707\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-2
6T16:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aeed8d07b9e87fec731b2b36308602dd19790fc9218fe6fc0667127100916835\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fcfa6a28052cef728edc32e435c8e49560aca61dcdd9a1aee7fb96762c597434\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://62a0ad6430e07209cecff40860c0e36cb297f37d4f0230f89890b637009538b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://62a0ad6430e07209cecff40860c0e36cb297f37d4f0230f89890b637009538b7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2a5a56bc9d7995aaedd0e9dac5e9755a5b145cf9e663df946b9fa8139174e4b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2a5a56bc9d7995aaedd0e9dac5e9755a5b145cf9e6
63df946b9fa8139174e4b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://ef21dd98ef762ec4d9fa31722e4cc551c00be7c0210f4e9f1415a2d8672c51b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef21dd98ef762ec4d9fa31722e4cc551c00be7c0210f4e9f1415a2d8672c51b8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.639985 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e58c71a-f1f1-421b-8e21-53ef9b51b937\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c1c8845e13a8d4b226f13897c388c5e07b2e9442c182930b42a88b2ed7b299b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a953b0ed4474ae87ed2f4658deb39951e319c1675872e49ea28b634d3c37b8b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1160d459f3a91359793f3431bd62257ad71f9446c935f43b5265443946d3e11\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.650024 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-p7qpc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"000aa434-a8c8-4051-88f4-c50d48ce851b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4d967fccbef000c343246b0f7d1cbdd98592d6dc499d444472c20588a6e6697\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m7gvt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-p7qpc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.664984 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bccdd94a-94b4-4a16-95dd-c375a34f754f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:20Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://90ccd48f832c010ac33cb2001b2e71aac437d4366744c03a232ff9d8dd4acea4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ff39b9e61bbd169188d426264ee57a8ca85d3912e9b2be037428b91bb19c360a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mou
ntPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c9a6673ba825e6843d9b73a97df6af0fc56120e88fbf811acebec0c5c73a7b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f837648e33ddfc0f0fa813c02e071c58371657864b8e0119dc4dad3191f5081\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCou
nt\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://603d534303d39969a1418f5b0d8c652a442130aa0701468f19e6b60d73f783c5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://23ba0301e585aba7eb6cd31bedaa859ae510949722f83fa637d629b0eaaa4663\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://012a0d6579a696a143f333e0c2369ccc4ad9de609183f3b0286726ea15e77d38\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qkvf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-6fsw2\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.676273 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.687501 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-45gx2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f950c5c3-b446-44cf-9de5-dd7ff03b615f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10c92950abd6cddc41b262bd7e06b74fa47a01c012edd0bec0f5260b992ee70b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-lwt2v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-45gx2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.697859 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z64kc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ddaae5-b604-4352-b756-3a4ee374b6e3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b554efe88a77e7cf03fc70023bc8663bc926af4a8a95d657d5073fcc44c9f17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4131a32bf9f6e8aa00c8250af3d01f8768e1db9c93b7a25b925580f0f337c2c2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f9g7h\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:17Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-z64kc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:24Z is after 2025-08-24T17:21:41Z" Jan 26 
16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.708622 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bz29j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d5c89572-d108-4b35-ab46-dfbbc8b7e3be\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d80fdc9f189133778add9127ee70dbdd4ca23ed09cf047218ed4ce9e4b07ce36\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://82ddf2354d7df028aad27125fddbfccd87910a5407d7d63ff39865dd9cedcaa7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T16:55:54Z\\\",\\\"message\\\":\\\"2026-01-26T16:55:08+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c36d9952-7cfc-4ebd-954c-df5db2c91d07\\\\n2026-01-26T16:55:08+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c36d9952-7cfc-4ebd-954c-df5db2c91d07 to /host/opt/cni/bin/\\\\n2026-01-26T16:55:09Z [verbose] multus-daemon started\\\\n2026-01-26T16:55:09Z [verbose] Readiness Indicator file check\\\\n2026-01-26T16:55:54Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-79bqr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bz29j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.718333 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-wx7wp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"27ebe888-610a-47c4-b256-3ddbf03f83b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with 
unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:18Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2dr8s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:18Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-wx7wp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.727686 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.727719 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.727727 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.727860 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.727880 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:24Z","lastTransitionTime":"2026-01-26T16:56:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.739284 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"83ffee97-9e36-462b-b020-39fcf1c33c00\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00669bc8bfcb8d7fa5d244f0819a79880bc778ed6aab7b68787c72b6f1709443\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98604715e78c851f959f0c229d5e36f68203dd7d1af5522d2c76c17ec168f95b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ced4687ba65f43cdb20daccd718c699d6a86a64138439b5f8fbe6bbc37b1ba3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5d2ec0d2b95278257555fc63611b54fc6c16f5abbb660c6dcd07c256495f67cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5d2ec0d2b95278257555fc63611b54fc6c16f5abbb660c6dcd07c256495f67cf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.753419 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.797973 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b2cd44e037e42e82e40d3b3a2bc4e00c18804301aa567cf5bf5c7c25ef536be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.812259 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0ddedab5-2528-4881-9251-9ba5334aea61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b9a5f5b650bd1b1e2875d8767634d7ac74fd1c90ea76f84e65f5e5078828be62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79930d9ec5ae2071a02e2417508458bd2f1b5693205ae0ad3ee46c5a3a31493b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8jdbl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-q8cb9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.824591 4865 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.831160 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.831217 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.831228 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.831245 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.831256 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:24Z","lastTransitionTime":"2026-01-26T16:56:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.846291 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c0135f6-4074-4aab-9413-a8eb948cd566\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disable
d\\\"}]},{\\\"containerID\\\":\\\"cri-o://a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\
",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b1bdaec7b6fb09cece76cbaadc553059d80d03532328d814747fd917b452d7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b1bdaec7b6fb09cece76cbaadc553059d80d03532328d814747fd917b452d7f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T16:56:08Z\\\",\\\"message\\\":\\\" 6909 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 16:56:07.991381 6909 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0126 16:56:07.991404 6909 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0126 16:56:07.991419 6909 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0126 16:56:07.991425 6909 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0126 16:56:07.991450 6909 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0126 16:56:07.991487 6909 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0126 16:56:07.991518 6909 factory.go:656] Stopping watch factory\\\\nI0126 16:56:07.992231 6909 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 16:56:07.992248 6909 handler.go:208] Removed *v1.Node event handler 7\\\\nI0126 16:56:07.992268 6909 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0126 16:56:07.992275 6909 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0126 16:56:07.992330 6909 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0126 16:56:07.992335 6909 handler.go:208] Removed *v1.Node event handler 2\\\\nI0126 16:56:07.992603 6909 handler.go:208] Removed *v1.NetworkPolicy ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:56:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-44x2q_openshift-ovn-kubernetes(0c0135f6-4074-4aab-9413-a8eb948cd566)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:55:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:55:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vcbts\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:55:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-44x2q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.864049 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8caee532-e315-445d-b84c-730aaa98f649\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769\\\",\\\"i
mage\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5610d76387528c6d73e03566f9acf4b744b79f4a39119d35d23b8b2dcb385f6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T16:55:01Z\\\",\\\"message\\\":\\\"26 16:55:01.651944 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 16:55:01.651948 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 16:55:01.651953 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 16:55:01.651913 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI0126 16:55:01.656092 1 tlsconfig.go:203] \\\\\\\"Loaded serving cert\\\\\\\" certName=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"localhost\\\\\\\\\\\\\\\" [serving] validServingFor=[localhost] issuer=\\\\\\\\\\\\\\\"check-endpoints-signer@1769446485\\\\\\\\\\\\\\\" (2026-01-26 16:54:44 +0000 UTC to 2026-02-25 16:54:45 +0000 UTC (now=2026-01-26 16:55:01.656056838 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656240 1 named_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769446496\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769446495\\\\\\\\\\\\\\\" (2026-01-26 15:54:55 +0000 UTC to 2027-01-26 15:54:55 +0000 UTC (now=2026-01-26 
16:55:01.656210252 +0000 UTC))\\\\\\\"\\\\nI0126 16:55:01.656262 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0126 16:55:01.656285 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0126 16:55:01.656314 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656337 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0126 16:55:01.656365 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2264924310/tls.crt::/tmp/serving-cert-2264924310/tls.key\\\\\\\"\\\\nI0126 16:55:01.656471 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nF0126 16:55:01.657122 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:44Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:42Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:24Z is after 2025-08-24T17:21:41Z" 
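Every status patch above fails identically: the apiserver cannot call the pod.network-node-identity.openshift.io webhook because its serving certificate expired at 2025-08-24T17:21:41Z, long before the node's current clock of 2026-01-26T16:56:24Z. Below is a minimal Go sketch of the same x509 validity test that produces the "certificate has expired or is not yet valid" error; it is illustrative only, not the webhook's actual code, and webhook-cert.pem is a placeholder path, not one taken from this log.

// certcheck.go - minimal sketch (not kubelet or webhook source): parse a PEM
// certificate and report whether it is valid at the current time, the same
// NotBefore/NotAfter test that fails in the entries above.
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	// Hypothetical path; substitute the serving certificate under test.
	data, err := os.ReadFile("webhook-cert.pem")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	block, _ := pem.Decode(data)
	if block == nil || block.Type != "CERTIFICATE" {
		fmt.Fprintln(os.Stderr, "no CERTIFICATE block found")
		os.Exit(1)
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	now := time.Now().UTC()
	fmt.Printf("valid %s to %s\n",
		cert.NotBefore.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
	switch {
	case now.Before(cert.NotBefore):
		fmt.Println("certificate is not yet valid")
	case now.After(cert.NotAfter):
		// The state reported above: current time is after NotAfter.
		fmt.Println("certificate has expired")
	default:
		fmt.Println("certificate is currently valid")
	}
}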
Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.879258 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c52621ee-c29b-488b-a3bc-1614bcc37576\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T16:54:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://97bf00e18f645318dd33d3ca36e9eda72e760adf20b3b28332628932c8e5b38b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:54:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00c85dd79fa2ffbae8e6a95d729ed74719c871cdfb91f69a761ae95cec2055b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://00c85dd79fa2ffbae8e6a95d729ed74719c871cdfb91f69a761ae95cec2055b3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T16:54:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T16:54:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T16:54:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.898482 4865 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:03Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44c06740a3b924e4b54afda160e7e790e2bec67ca8852b703887591802f029af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.916196 4865 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T16:55:02Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://004f8be0140f41e93a5c5bc1ff9f56c6e245ab27a8c0aa8dcbb4b2aee2ce8b73\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3633767c298ed2680b706085d62fa45140e09856b742cbcf4168893827107862\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T16:55:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T16:56:24Z is after 2025-08-24T17:21:41Z" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.934704 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.934784 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.934797 4865 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.934815 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:24 crc kubenswrapper[4865]: I0126 16:56:24.934829 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:24Z","lastTransitionTime":"2026-01-26T16:56:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.038731 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.038774 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.038787 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.038806 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.038819 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:25Z","lastTransitionTime":"2026-01-26T16:56:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.141962 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.142046 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.142067 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.142090 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.142108 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:25Z","lastTransitionTime":"2026-01-26T16:56:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.245600 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.245685 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.245711 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.245743 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.245768 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:25Z","lastTransitionTime":"2026-01-26T16:56:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.349371 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.349440 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.349465 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.349498 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.349521 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:25Z","lastTransitionTime":"2026-01-26T16:56:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.452105 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.452182 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.452197 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.452219 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.452236 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:25Z","lastTransitionTime":"2026-01-26T16:56:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.466503 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-25 17:06:56.773277265 +0000 UTC Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.556157 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.556219 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.556238 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.556260 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.556275 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:25Z","lastTransitionTime":"2026-01-26T16:56:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.660221 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.660326 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.660341 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.660366 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.660379 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:25Z","lastTransitionTime":"2026-01-26T16:56:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.764290 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.764342 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.764354 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.764390 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.764403 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:25Z","lastTransitionTime":"2026-01-26T16:56:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.867051 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.867124 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.867136 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.867159 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.867173 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:25Z","lastTransitionTime":"2026-01-26T16:56:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.970689 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.970760 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.970774 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.970793 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:25 crc kubenswrapper[4865]: I0126 16:56:25.970807 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:25Z","lastTransitionTime":"2026-01-26T16:56:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.074593 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.074659 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.074675 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.074693 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.074705 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:26Z","lastTransitionTime":"2026-01-26T16:56:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.179093 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.179159 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.179172 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.179193 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.179205 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:26Z","lastTransitionTime":"2026-01-26T16:56:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.282637 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.282724 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.282751 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.282786 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.282810 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:26Z","lastTransitionTime":"2026-01-26T16:56:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.357633 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.357709 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.357658 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:56:26 crc kubenswrapper[4865]: E0126 16:56:26.357836 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:56:26 crc kubenswrapper[4865]: E0126 16:56:26.357918 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:56:26 crc kubenswrapper[4865]: E0126 16:56:26.358030 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9" Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.358080 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:56:26 crc kubenswrapper[4865]: E0126 16:56:26.358151 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.385580 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.385629 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.385645 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.385662 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.385673 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:26Z","lastTransitionTime":"2026-01-26T16:56:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.467738 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-02 20:34:33.716617644 +0000 UTC Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.488287 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.488353 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.488378 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.488409 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.488469 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:26Z","lastTransitionTime":"2026-01-26T16:56:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.590736 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.590802 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.590819 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.590843 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.590864 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:26Z","lastTransitionTime":"2026-01-26T16:56:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.693776 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.693863 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.693888 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.693919 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.693943 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:26Z","lastTransitionTime":"2026-01-26T16:56:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.797421 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.797496 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.797526 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.797573 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.797600 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:26Z","lastTransitionTime":"2026-01-26T16:56:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.901364 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.901443 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.901462 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.901491 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:26 crc kubenswrapper[4865]: I0126 16:56:26.901512 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:26Z","lastTransitionTime":"2026-01-26T16:56:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.007141 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.007220 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.007243 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.007280 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.007307 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:27Z","lastTransitionTime":"2026-01-26T16:56:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.110590 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.110635 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.110645 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.110658 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.110673 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:27Z","lastTransitionTime":"2026-01-26T16:56:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.213062 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.213106 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.213146 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.213164 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.213176 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:27Z","lastTransitionTime":"2026-01-26T16:56:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.316667 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.316735 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.316748 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.316769 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.316781 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:27Z","lastTransitionTime":"2026-01-26T16:56:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.420023 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.420104 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.420121 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.420530 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.420588 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:27Z","lastTransitionTime":"2026-01-26T16:56:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.468308 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-26 10:17:20.882914826 +0000 UTC Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.523379 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.523453 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.523476 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.523506 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.523533 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:27Z","lastTransitionTime":"2026-01-26T16:56:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.626536 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.626593 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.626610 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.626637 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.626652 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:27Z","lastTransitionTime":"2026-01-26T16:56:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.729971 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.730065 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.730074 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.730093 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.730103 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:27Z","lastTransitionTime":"2026-01-26T16:56:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.832730 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.832785 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.832801 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.832826 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.832843 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:27Z","lastTransitionTime":"2026-01-26T16:56:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.936224 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.936301 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.936319 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.936345 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:27 crc kubenswrapper[4865]: I0126 16:56:27.936363 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:27Z","lastTransitionTime":"2026-01-26T16:56:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.039078 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.039133 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.039147 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.039168 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.039181 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:28Z","lastTransitionTime":"2026-01-26T16:56:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.142932 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.143063 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.143093 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.143133 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.143161 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:28Z","lastTransitionTime":"2026-01-26T16:56:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.247178 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.247258 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.247275 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.247727 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.247782 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:28Z","lastTransitionTime":"2026-01-26T16:56:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.350685 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.350728 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.350740 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.350758 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.350770 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:28Z","lastTransitionTime":"2026-01-26T16:56:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.357761 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.357791 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.357834 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.357791 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:56:28 crc kubenswrapper[4865]: E0126 16:56:28.357941 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:56:28 crc kubenswrapper[4865]: E0126 16:56:28.358045 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:56:28 crc kubenswrapper[4865]: E0126 16:56:28.358265 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:56:28 crc kubenswrapper[4865]: E0126 16:56:28.358358 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9" Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.453306 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.453378 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.453395 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.453416 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.453433 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:28Z","lastTransitionTime":"2026-01-26T16:56:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.469141 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-13 03:19:00.321496416 +0000 UTC Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.556139 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.556179 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.556188 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.556203 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.556215 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:28Z","lastTransitionTime":"2026-01-26T16:56:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.658745 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.658790 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.658798 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.658812 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.658821 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:28Z","lastTransitionTime":"2026-01-26T16:56:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.761314 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.761359 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.761370 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.761389 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.761402 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:28Z","lastTransitionTime":"2026-01-26T16:56:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.864331 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.864399 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.864413 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.864435 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.864449 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:28Z","lastTransitionTime":"2026-01-26T16:56:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.972085 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.972153 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.972172 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.972198 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:28 crc kubenswrapper[4865]: I0126 16:56:28.972216 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:28Z","lastTransitionTime":"2026-01-26T16:56:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:29 crc kubenswrapper[4865]: I0126 16:56:29.075507 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:29 crc kubenswrapper[4865]: I0126 16:56:29.075568 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:29 crc kubenswrapper[4865]: I0126 16:56:29.075580 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:29 crc kubenswrapper[4865]: I0126 16:56:29.075595 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:29 crc kubenswrapper[4865]: I0126 16:56:29.075633 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:29Z","lastTransitionTime":"2026-01-26T16:56:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:29 crc kubenswrapper[4865]: I0126 16:56:29.178959 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:29 crc kubenswrapper[4865]: I0126 16:56:29.179059 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:29 crc kubenswrapper[4865]: I0126 16:56:29.179073 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:29 crc kubenswrapper[4865]: I0126 16:56:29.179088 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:29 crc kubenswrapper[4865]: I0126 16:56:29.179135 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:29Z","lastTransitionTime":"2026-01-26T16:56:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:29 crc kubenswrapper[4865]: I0126 16:56:29.282326 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:29 crc kubenswrapper[4865]: I0126 16:56:29.282391 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:29 crc kubenswrapper[4865]: I0126 16:56:29.282414 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:29 crc kubenswrapper[4865]: I0126 16:56:29.282443 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:29 crc kubenswrapper[4865]: I0126 16:56:29.282469 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:29Z","lastTransitionTime":"2026-01-26T16:56:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:29 crc kubenswrapper[4865]: I0126 16:56:29.385589 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:29 crc kubenswrapper[4865]: I0126 16:56:29.385645 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:29 crc kubenswrapper[4865]: I0126 16:56:29.385661 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:29 crc kubenswrapper[4865]: I0126 16:56:29.385679 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:29 crc kubenswrapper[4865]: I0126 16:56:29.385692 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:29Z","lastTransitionTime":"2026-01-26T16:56:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:29 crc kubenswrapper[4865]: I0126 16:56:29.469259 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-19 00:10:03.022474573 +0000 UTC Jan 26 16:56:29 crc kubenswrapper[4865]: I0126 16:56:29.489092 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:29 crc kubenswrapper[4865]: I0126 16:56:29.489156 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:29 crc kubenswrapper[4865]: I0126 16:56:29.489167 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:29 crc kubenswrapper[4865]: I0126 16:56:29.489186 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:29 crc kubenswrapper[4865]: I0126 16:56:29.489201 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:29Z","lastTransitionTime":"2026-01-26T16:56:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:29 crc kubenswrapper[4865]: I0126 16:56:29.593177 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:29 crc kubenswrapper[4865]: I0126 16:56:29.593225 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:29 crc kubenswrapper[4865]: I0126 16:56:29.593240 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:29 crc kubenswrapper[4865]: I0126 16:56:29.593264 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:29 crc kubenswrapper[4865]: I0126 16:56:29.593277 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:29Z","lastTransitionTime":"2026-01-26T16:56:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:29 crc kubenswrapper[4865]: I0126 16:56:29.696487 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:29 crc kubenswrapper[4865]: I0126 16:56:29.696550 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:29 crc kubenswrapper[4865]: I0126 16:56:29.696561 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:29 crc kubenswrapper[4865]: I0126 16:56:29.696583 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:29 crc kubenswrapper[4865]: I0126 16:56:29.696595 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:29Z","lastTransitionTime":"2026-01-26T16:56:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:29 crc kubenswrapper[4865]: I0126 16:56:29.799049 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:29 crc kubenswrapper[4865]: I0126 16:56:29.799106 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:29 crc kubenswrapper[4865]: I0126 16:56:29.799118 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:29 crc kubenswrapper[4865]: I0126 16:56:29.799136 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:29 crc kubenswrapper[4865]: I0126 16:56:29.799149 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:29Z","lastTransitionTime":"2026-01-26T16:56:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:29 crc kubenswrapper[4865]: I0126 16:56:29.902362 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:29 crc kubenswrapper[4865]: I0126 16:56:29.902430 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:29 crc kubenswrapper[4865]: I0126 16:56:29.902443 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:29 crc kubenswrapper[4865]: I0126 16:56:29.902467 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:29 crc kubenswrapper[4865]: I0126 16:56:29.902479 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:29Z","lastTransitionTime":"2026-01-26T16:56:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.005394 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.005441 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.005452 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.005471 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.005483 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:30Z","lastTransitionTime":"2026-01-26T16:56:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.108246 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.108288 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.108297 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.108311 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.108322 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:30Z","lastTransitionTime":"2026-01-26T16:56:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.211461 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.211526 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.211547 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.211571 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.211586 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:30Z","lastTransitionTime":"2026-01-26T16:56:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.314026 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.314169 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.314184 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.314202 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.314214 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:30Z","lastTransitionTime":"2026-01-26T16:56:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.357630 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.357671 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:56:30 crc kubenswrapper[4865]: E0126 16:56:30.357755 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.357781 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.357862 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:56:30 crc kubenswrapper[4865]: E0126 16:56:30.357968 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:56:30 crc kubenswrapper[4865]: E0126 16:56:30.358079 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:56:30 crc kubenswrapper[4865]: E0126 16:56:30.358127 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.417630 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.417677 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.417690 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.417707 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.417718 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:30Z","lastTransitionTime":"2026-01-26T16:56:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.469665 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 18:27:16.047387431 +0000 UTC Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.519893 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.519971 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.520020 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.520047 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.520068 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:30Z","lastTransitionTime":"2026-01-26T16:56:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.622445 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.622527 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.622547 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.622578 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.622602 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:30Z","lastTransitionTime":"2026-01-26T16:56:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.724741 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.724787 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.724800 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.724829 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.724841 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:30Z","lastTransitionTime":"2026-01-26T16:56:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.827756 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.827813 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.827824 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.827841 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.827855 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:30Z","lastTransitionTime":"2026-01-26T16:56:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.848545 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.848587 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.848601 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.848856 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.848887 4865 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T16:56:30Z","lastTransitionTime":"2026-01-26T16:56:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.954738 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-s6l9d"] Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.955228 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-s6l9d" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.957441 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.957521 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.957554 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.957616 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.984517 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-z64kc" podStartSLOduration=86.984493205 podStartE2EDuration="1m26.984493205s" podCreationTimestamp="2026-01-26 16:55:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:56:30.969100616 +0000 UTC m=+118.552986203" watchObservedRunningTime="2026-01-26 16:56:30.984493205 +0000 UTC m=+118.568378782" Jan 26 16:56:30 crc kubenswrapper[4865]: I0126 16:56:30.995707 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-45gx2" podStartSLOduration=87.995683994 podStartE2EDuration="1m27.995683994s" podCreationTimestamp="2026-01-26 16:55:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:56:30.995301873 +0000 UTC m=+118.579187460" watchObservedRunningTime="2026-01-26 16:56:30.995683994 +0000 UTC m=+118.579569581" Jan 26 
16:56:31 crc kubenswrapper[4865]: I0126 16:56:31.023604 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podStartSLOduration=88.02358116 podStartE2EDuration="1m28.02358116s" podCreationTimestamp="2026-01-26 16:55:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:56:31.023513628 +0000 UTC m=+118.607399215" watchObservedRunningTime="2026-01-26 16:56:31.02358116 +0000 UTC m=+118.607466747" Jan 26 16:56:31 crc kubenswrapper[4865]: I0126 16:56:31.045389 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-bz29j" podStartSLOduration=88.045371032 podStartE2EDuration="1m28.045371032s" podCreationTimestamp="2026-01-26 16:55:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:56:31.043396565 +0000 UTC m=+118.627282162" watchObservedRunningTime="2026-01-26 16:56:31.045371032 +0000 UTC m=+118.629256619" Jan 26 16:56:31 crc kubenswrapper[4865]: I0126 16:56:31.046975 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f9d77d07-120c-4547-8600-97f2ca3d8ee1-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-s6l9d\" (UID: \"f9d77d07-120c-4547-8600-97f2ca3d8ee1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-s6l9d" Jan 26 16:56:31 crc kubenswrapper[4865]: I0126 16:56:31.047032 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f9d77d07-120c-4547-8600-97f2ca3d8ee1-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-s6l9d\" (UID: \"f9d77d07-120c-4547-8600-97f2ca3d8ee1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-s6l9d" Jan 26 16:56:31 crc kubenswrapper[4865]: I0126 16:56:31.047095 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/f9d77d07-120c-4547-8600-97f2ca3d8ee1-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-s6l9d\" (UID: \"f9d77d07-120c-4547-8600-97f2ca3d8ee1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-s6l9d" Jan 26 16:56:31 crc kubenswrapper[4865]: I0126 16:56:31.047123 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f9d77d07-120c-4547-8600-97f2ca3d8ee1-service-ca\") pod \"cluster-version-operator-5c965bbfc6-s6l9d\" (UID: \"f9d77d07-120c-4547-8600-97f2ca3d8ee1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-s6l9d" Jan 26 16:56:31 crc kubenswrapper[4865]: I0126 16:56:31.047152 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/f9d77d07-120c-4547-8600-97f2ca3d8ee1-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-s6l9d\" (UID: \"f9d77d07-120c-4547-8600-97f2ca3d8ee1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-s6l9d" Jan 26 16:56:31 crc kubenswrapper[4865]: I0126 16:56:31.067293 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=62.067274147 podStartE2EDuration="1m2.067274147s" podCreationTimestamp="2026-01-26 16:55:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:56:31.066858315 +0000 UTC m=+118.650743902" watchObservedRunningTime="2026-01-26 16:56:31.067274147 +0000 UTC m=+118.651159734" Jan 26 16:56:31 crc kubenswrapper[4865]: I0126 16:56:31.147626 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f9d77d07-120c-4547-8600-97f2ca3d8ee1-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-s6l9d\" (UID: \"f9d77d07-120c-4547-8600-97f2ca3d8ee1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-s6l9d" Jan 26 16:56:31 crc kubenswrapper[4865]: I0126 16:56:31.147663 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f9d77d07-120c-4547-8600-97f2ca3d8ee1-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-s6l9d\" (UID: \"f9d77d07-120c-4547-8600-97f2ca3d8ee1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-s6l9d" Jan 26 16:56:31 crc kubenswrapper[4865]: I0126 16:56:31.147701 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/f9d77d07-120c-4547-8600-97f2ca3d8ee1-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-s6l9d\" (UID: \"f9d77d07-120c-4547-8600-97f2ca3d8ee1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-s6l9d" Jan 26 16:56:31 crc kubenswrapper[4865]: I0126 16:56:31.147717 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f9d77d07-120c-4547-8600-97f2ca3d8ee1-service-ca\") pod \"cluster-version-operator-5c965bbfc6-s6l9d\" (UID: \"f9d77d07-120c-4547-8600-97f2ca3d8ee1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-s6l9d" Jan 26 16:56:31 crc kubenswrapper[4865]: I0126 16:56:31.147732 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/f9d77d07-120c-4547-8600-97f2ca3d8ee1-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-s6l9d\" (UID: \"f9d77d07-120c-4547-8600-97f2ca3d8ee1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-s6l9d" Jan 26 16:56:31 crc kubenswrapper[4865]: I0126 16:56:31.147777 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/f9d77d07-120c-4547-8600-97f2ca3d8ee1-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-s6l9d\" (UID: \"f9d77d07-120c-4547-8600-97f2ca3d8ee1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-s6l9d" Jan 26 16:56:31 crc kubenswrapper[4865]: I0126 16:56:31.147799 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/f9d77d07-120c-4547-8600-97f2ca3d8ee1-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-s6l9d\" (UID: \"f9d77d07-120c-4547-8600-97f2ca3d8ee1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-s6l9d" Jan 26 16:56:31 crc kubenswrapper[4865]: I0126 16:56:31.148933 4865 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f9d77d07-120c-4547-8600-97f2ca3d8ee1-service-ca\") pod \"cluster-version-operator-5c965bbfc6-s6l9d\" (UID: \"f9d77d07-120c-4547-8600-97f2ca3d8ee1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-s6l9d" Jan 26 16:56:31 crc kubenswrapper[4865]: I0126 16:56:31.156199 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f9d77d07-120c-4547-8600-97f2ca3d8ee1-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-s6l9d\" (UID: \"f9d77d07-120c-4547-8600-97f2ca3d8ee1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-s6l9d" Jan 26 16:56:31 crc kubenswrapper[4865]: I0126 16:56:31.166958 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f9d77d07-120c-4547-8600-97f2ca3d8ee1-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-s6l9d\" (UID: \"f9d77d07-120c-4547-8600-97f2ca3d8ee1\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-s6l9d" Jan 26 16:56:31 crc kubenswrapper[4865]: I0126 16:56:31.199791 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=90.199772486 podStartE2EDuration="1m30.199772486s" podCreationTimestamp="2026-01-26 16:55:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:56:31.187391513 +0000 UTC m=+118.771277100" watchObservedRunningTime="2026-01-26 16:56:31.199772486 +0000 UTC m=+118.783658073" Jan 26 16:56:31 crc kubenswrapper[4865]: I0126 16:56:31.199914 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=28.19991031 podStartE2EDuration="28.19991031s" podCreationTimestamp="2026-01-26 16:56:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:56:31.199454197 +0000 UTC m=+118.783339804" watchObservedRunningTime="2026-01-26 16:56:31.19991031 +0000 UTC m=+118.783795897" Jan 26 16:56:31 crc kubenswrapper[4865]: I0126 16:56:31.211644 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-p7qpc" podStartSLOduration=88.211629504 podStartE2EDuration="1m28.211629504s" podCreationTimestamp="2026-01-26 16:55:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:56:31.211267544 +0000 UTC m=+118.795153131" watchObservedRunningTime="2026-01-26 16:56:31.211629504 +0000 UTC m=+118.795515091" Jan 26 16:56:31 crc kubenswrapper[4865]: I0126 16:56:31.286369 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-s6l9d" Jan 26 16:56:31 crc kubenswrapper[4865]: I0126 16:56:31.298219 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=25.298199156 podStartE2EDuration="25.298199156s" podCreationTimestamp="2026-01-26 16:56:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:56:31.297109665 +0000 UTC m=+118.880995282" watchObservedRunningTime="2026-01-26 16:56:31.298199156 +0000 UTC m=+118.882084743" Jan 26 16:56:31 crc kubenswrapper[4865]: I0126 16:56:31.298527 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-6fsw2" podStartSLOduration=88.298519886 podStartE2EDuration="1m28.298519886s" podCreationTimestamp="2026-01-26 16:55:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:56:31.231509411 +0000 UTC m=+118.815395008" watchObservedRunningTime="2026-01-26 16:56:31.298519886 +0000 UTC m=+118.882405473" Jan 26 16:56:31 crc kubenswrapper[4865]: I0126 16:56:31.314564 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=88.314541912 podStartE2EDuration="1m28.314541912s" podCreationTimestamp="2026-01-26 16:55:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:56:31.314300635 +0000 UTC m=+118.898186232" watchObservedRunningTime="2026-01-26 16:56:31.314541912 +0000 UTC m=+118.898427509" Jan 26 16:56:31 crc kubenswrapper[4865]: I0126 16:56:31.470562 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-06 22:23:29.981576771 +0000 UTC Jan 26 16:56:31 crc kubenswrapper[4865]: I0126 16:56:31.471042 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Rotating certificates Jan 26 16:56:31 crc kubenswrapper[4865]: I0126 16:56:31.480669 4865 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Jan 26 16:56:31 crc kubenswrapper[4865]: I0126 16:56:31.631425 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-s6l9d" event={"ID":"f9d77d07-120c-4547-8600-97f2ca3d8ee1","Type":"ContainerStarted","Data":"c41160ed62bfad2aba374055db392b3e1d3f9b4cdcf83cc1a7ce397b7b56fb17"} Jan 26 16:56:31 crc kubenswrapper[4865]: I0126 16:56:31.631500 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-s6l9d" event={"ID":"f9d77d07-120c-4547-8600-97f2ca3d8ee1","Type":"ContainerStarted","Data":"a6d85e16b7f7086414dc64fe120df620675a90c4f86cdee3d00019b313af05c2"} Jan 26 16:56:31 crc kubenswrapper[4865]: I0126 16:56:31.649034 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-s6l9d" podStartSLOduration=88.648970521 podStartE2EDuration="1m28.648970521s" podCreationTimestamp="2026-01-26 16:55:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2026-01-26 16:56:31.647601722 +0000 UTC m=+119.231487329" watchObservedRunningTime="2026-01-26 16:56:31.648970521 +0000 UTC m=+119.232856148" Jan 26 16:56:32 crc kubenswrapper[4865]: I0126 16:56:32.357695 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:56:32 crc kubenswrapper[4865]: I0126 16:56:32.357732 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:56:32 crc kubenswrapper[4865]: I0126 16:56:32.357762 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:56:32 crc kubenswrapper[4865]: E0126 16:56:32.358760 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:56:32 crc kubenswrapper[4865]: E0126 16:56:32.358560 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:56:32 crc kubenswrapper[4865]: I0126 16:56:32.357934 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:56:32 crc kubenswrapper[4865]: E0126 16:56:32.358921 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:56:32 crc kubenswrapper[4865]: E0126 16:56:32.358969 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9" Jan 26 16:56:34 crc kubenswrapper[4865]: E0126 16:56:34.178656 4865 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Jan 26 16:56:34 crc kubenswrapper[4865]: I0126 16:56:34.357172 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:56:34 crc kubenswrapper[4865]: I0126 16:56:34.357182 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:56:34 crc kubenswrapper[4865]: I0126 16:56:34.357220 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:56:34 crc kubenswrapper[4865]: E0126 16:56:34.358299 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9" Jan 26 16:56:34 crc kubenswrapper[4865]: I0126 16:56:34.358328 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:56:34 crc kubenswrapper[4865]: E0126 16:56:34.358526 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:56:34 crc kubenswrapper[4865]: E0126 16:56:34.358645 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:56:34 crc kubenswrapper[4865]: E0126 16:56:34.358737 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:56:35 crc kubenswrapper[4865]: E0126 16:56:35.078647 4865 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 26 16:56:36 crc kubenswrapper[4865]: I0126 16:56:36.357965 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:56:36 crc kubenswrapper[4865]: I0126 16:56:36.358113 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:56:36 crc kubenswrapper[4865]: E0126 16:56:36.358669 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:56:36 crc kubenswrapper[4865]: E0126 16:56:36.358836 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9" Jan 26 16:56:36 crc kubenswrapper[4865]: I0126 16:56:36.359127 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:56:36 crc kubenswrapper[4865]: I0126 16:56:36.359224 4865 scope.go:117] "RemoveContainer" containerID="0b1bdaec7b6fb09cece76cbaadc553059d80d03532328d814747fd917b452d7f" Jan 26 16:56:36 crc kubenswrapper[4865]: E0126 16:56:36.359458 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:56:36 crc kubenswrapper[4865]: I0126 16:56:36.359647 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:56:36 crc kubenswrapper[4865]: E0126 16:56:36.359797 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:56:36 crc kubenswrapper[4865]: E0126 16:56:36.359643 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-44x2q_openshift-ovn-kubernetes(0c0135f6-4074-4aab-9413-a8eb948cd566)\"" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" Jan 26 16:56:38 crc kubenswrapper[4865]: I0126 16:56:38.357236 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:56:38 crc kubenswrapper[4865]: E0126 16:56:38.357422 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9" Jan 26 16:56:38 crc kubenswrapper[4865]: I0126 16:56:38.357468 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:56:38 crc kubenswrapper[4865]: I0126 16:56:38.357508 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:56:38 crc kubenswrapper[4865]: I0126 16:56:38.357588 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:56:38 crc kubenswrapper[4865]: E0126 16:56:38.357623 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:56:38 crc kubenswrapper[4865]: E0126 16:56:38.357759 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:56:38 crc kubenswrapper[4865]: E0126 16:56:38.357852 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:56:40 crc kubenswrapper[4865]: E0126 16:56:40.083016 4865 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 26 16:56:40 crc kubenswrapper[4865]: I0126 16:56:40.357637 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:56:40 crc kubenswrapper[4865]: I0126 16:56:40.357706 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:56:40 crc kubenswrapper[4865]: I0126 16:56:40.357718 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:56:40 crc kubenswrapper[4865]: I0126 16:56:40.357671 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:56:40 crc kubenswrapper[4865]: E0126 16:56:40.357832 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:56:40 crc kubenswrapper[4865]: E0126 16:56:40.357919 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:56:40 crc kubenswrapper[4865]: E0126 16:56:40.358050 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:56:40 crc kubenswrapper[4865]: E0126 16:56:40.358850 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9" Jan 26 16:56:42 crc kubenswrapper[4865]: I0126 16:56:42.105515 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-bz29j_d5c89572-d108-4b35-ab46-dfbbc8b7e3be/kube-multus/1.log" Jan 26 16:56:42 crc kubenswrapper[4865]: I0126 16:56:42.106054 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-bz29j_d5c89572-d108-4b35-ab46-dfbbc8b7e3be/kube-multus/0.log" Jan 26 16:56:42 crc kubenswrapper[4865]: I0126 16:56:42.106109 4865 generic.go:334] "Generic (PLEG): container finished" podID="d5c89572-d108-4b35-ab46-dfbbc8b7e3be" containerID="d80fdc9f189133778add9127ee70dbdd4ca23ed09cf047218ed4ce9e4b07ce36" exitCode=1 Jan 26 16:56:42 crc kubenswrapper[4865]: I0126 16:56:42.106150 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-bz29j" event={"ID":"d5c89572-d108-4b35-ab46-dfbbc8b7e3be","Type":"ContainerDied","Data":"d80fdc9f189133778add9127ee70dbdd4ca23ed09cf047218ed4ce9e4b07ce36"} Jan 26 16:56:42 crc kubenswrapper[4865]: I0126 16:56:42.106200 4865 scope.go:117] "RemoveContainer" containerID="82ddf2354d7df028aad27125fddbfccd87910a5407d7d63ff39865dd9cedcaa7" Jan 26 16:56:42 crc kubenswrapper[4865]: I0126 16:56:42.106830 4865 scope.go:117] "RemoveContainer" containerID="d80fdc9f189133778add9127ee70dbdd4ca23ed09cf047218ed4ce9e4b07ce36" Jan 26 16:56:42 crc kubenswrapper[4865]: E0126 16:56:42.107361 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-bz29j_openshift-multus(d5c89572-d108-4b35-ab46-dfbbc8b7e3be)\"" pod="openshift-multus/multus-bz29j" podUID="d5c89572-d108-4b35-ab46-dfbbc8b7e3be" Jan 26 16:56:42 crc kubenswrapper[4865]: I0126 16:56:42.357872 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:56:42 crc kubenswrapper[4865]: E0126 16:56:42.358060 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9" Jan 26 16:56:42 crc kubenswrapper[4865]: I0126 16:56:42.357910 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:56:42 crc kubenswrapper[4865]: I0126 16:56:42.358112 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:56:42 crc kubenswrapper[4865]: E0126 16:56:42.358149 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:56:42 crc kubenswrapper[4865]: E0126 16:56:42.358203 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:56:42 crc kubenswrapper[4865]: I0126 16:56:42.357872 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:56:42 crc kubenswrapper[4865]: E0126 16:56:42.358476 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:56:43 crc kubenswrapper[4865]: I0126 16:56:43.111543 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-bz29j_d5c89572-d108-4b35-ab46-dfbbc8b7e3be/kube-multus/1.log" Jan 26 16:56:44 crc kubenswrapper[4865]: I0126 16:56:44.357164 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:56:44 crc kubenswrapper[4865]: I0126 16:56:44.357208 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:56:44 crc kubenswrapper[4865]: I0126 16:56:44.357302 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:56:44 crc kubenswrapper[4865]: E0126 16:56:44.359878 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:56:44 crc kubenswrapper[4865]: I0126 16:56:44.359917 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:56:44 crc kubenswrapper[4865]: E0126 16:56:44.360045 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:56:44 crc kubenswrapper[4865]: E0126 16:56:44.360148 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9" Jan 26 16:56:44 crc kubenswrapper[4865]: E0126 16:56:44.360256 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:56:45 crc kubenswrapper[4865]: E0126 16:56:45.084837 4865 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 26 16:56:46 crc kubenswrapper[4865]: I0126 16:56:46.357081 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:56:46 crc kubenswrapper[4865]: I0126 16:56:46.357183 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:56:46 crc kubenswrapper[4865]: I0126 16:56:46.357194 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:56:46 crc kubenswrapper[4865]: I0126 16:56:46.357105 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:56:46 crc kubenswrapper[4865]: E0126 16:56:46.357285 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:56:46 crc kubenswrapper[4865]: E0126 16:56:46.357365 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9" Jan 26 16:56:46 crc kubenswrapper[4865]: E0126 16:56:46.357470 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:56:46 crc kubenswrapper[4865]: E0126 16:56:46.357521 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:56:48 crc kubenswrapper[4865]: I0126 16:56:48.357092 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:56:48 crc kubenswrapper[4865]: I0126 16:56:48.357161 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:56:48 crc kubenswrapper[4865]: I0126 16:56:48.357197 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:56:48 crc kubenswrapper[4865]: E0126 16:56:48.357277 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:56:48 crc kubenswrapper[4865]: I0126 16:56:48.357290 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:56:48 crc kubenswrapper[4865]: E0126 16:56:48.357419 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:56:48 crc kubenswrapper[4865]: E0126 16:56:48.357799 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9" Jan 26 16:56:48 crc kubenswrapper[4865]: E0126 16:56:48.357895 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:56:49 crc kubenswrapper[4865]: I0126 16:56:49.358832 4865 scope.go:117] "RemoveContainer" containerID="0b1bdaec7b6fb09cece76cbaadc553059d80d03532328d814747fd917b452d7f" Jan 26 16:56:50 crc kubenswrapper[4865]: E0126 16:56:50.086877 4865 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 26 16:56:50 crc kubenswrapper[4865]: I0126 16:56:50.142838 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-44x2q_0c0135f6-4074-4aab-9413-a8eb948cd566/ovnkube-controller/3.log" Jan 26 16:56:50 crc kubenswrapper[4865]: I0126 16:56:50.145923 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" event={"ID":"0c0135f6-4074-4aab-9413-a8eb948cd566","Type":"ContainerStarted","Data":"a3b738c8fde157f3008a270e9bcd4350f331a21a42ab58313b6cbd2a86780793"} Jan 26 16:56:50 crc kubenswrapper[4865]: I0126 16:56:50.146282 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:56:50 crc kubenswrapper[4865]: I0126 16:56:50.182007 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" podStartSLOduration=107.181973406 podStartE2EDuration="1m47.181973406s" podCreationTimestamp="2026-01-26 16:55:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:56:50.180332719 +0000 UTC m=+137.764218326" watchObservedRunningTime="2026-01-26 16:56:50.181973406 +0000 UTC m=+137.765858993" Jan 26 16:56:50 crc kubenswrapper[4865]: I0126 16:56:50.357527 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:56:50 crc kubenswrapper[4865]: I0126 16:56:50.357576 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:56:50 crc kubenswrapper[4865]: I0126 16:56:50.357668 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:56:50 crc kubenswrapper[4865]: E0126 16:56:50.357671 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:56:50 crc kubenswrapper[4865]: E0126 16:56:50.357759 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:56:50 crc kubenswrapper[4865]: E0126 16:56:50.357849 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:56:50 crc kubenswrapper[4865]: I0126 16:56:50.357896 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:56:50 crc kubenswrapper[4865]: E0126 16:56:50.358053 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9" Jan 26 16:56:50 crc kubenswrapper[4865]: I0126 16:56:50.534694 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-wx7wp"] Jan 26 16:56:51 crc kubenswrapper[4865]: I0126 16:56:51.149019 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:56:51 crc kubenswrapper[4865]: E0126 16:56:51.149517 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9" Jan 26 16:56:52 crc kubenswrapper[4865]: I0126 16:56:52.356917 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:56:52 crc kubenswrapper[4865]: I0126 16:56:52.357048 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:56:52 crc kubenswrapper[4865]: I0126 16:56:52.356947 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:56:52 crc kubenswrapper[4865]: E0126 16:56:52.357104 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9" Jan 26 16:56:52 crc kubenswrapper[4865]: E0126 16:56:52.357217 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:56:52 crc kubenswrapper[4865]: I0126 16:56:52.357275 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:56:52 crc kubenswrapper[4865]: E0126 16:56:52.357384 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:56:52 crc kubenswrapper[4865]: E0126 16:56:52.357600 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:56:54 crc kubenswrapper[4865]: I0126 16:56:54.357577 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:56:54 crc kubenswrapper[4865]: I0126 16:56:54.357598 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:56:54 crc kubenswrapper[4865]: I0126 16:56:54.357607 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:56:54 crc kubenswrapper[4865]: I0126 16:56:54.357577 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:56:54 crc kubenswrapper[4865]: E0126 16:56:54.359204 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:56:54 crc kubenswrapper[4865]: E0126 16:56:54.359268 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9" Jan 26 16:56:54 crc kubenswrapper[4865]: E0126 16:56:54.359327 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:56:54 crc kubenswrapper[4865]: E0126 16:56:54.359385 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:56:55 crc kubenswrapper[4865]: E0126 16:56:55.087706 4865 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 26 16:56:56 crc kubenswrapper[4865]: I0126 16:56:56.357586 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:56:56 crc kubenswrapper[4865]: I0126 16:56:56.357654 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:56:56 crc kubenswrapper[4865]: I0126 16:56:56.357664 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:56:56 crc kubenswrapper[4865]: I0126 16:56:56.357606 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:56:56 crc kubenswrapper[4865]: E0126 16:56:56.357801 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:56:56 crc kubenswrapper[4865]: E0126 16:56:56.357911 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9" Jan 26 16:56:56 crc kubenswrapper[4865]: E0126 16:56:56.358122 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:56:56 crc kubenswrapper[4865]: E0126 16:56:56.358210 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:56:56 crc kubenswrapper[4865]: I0126 16:56:56.359522 4865 scope.go:117] "RemoveContainer" containerID="d80fdc9f189133778add9127ee70dbdd4ca23ed09cf047218ed4ce9e4b07ce36" Jan 26 16:56:57 crc kubenswrapper[4865]: I0126 16:56:57.181981 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-bz29j_d5c89572-d108-4b35-ab46-dfbbc8b7e3be/kube-multus/1.log" Jan 26 16:56:57 crc kubenswrapper[4865]: I0126 16:56:57.182488 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-bz29j" event={"ID":"d5c89572-d108-4b35-ab46-dfbbc8b7e3be","Type":"ContainerStarted","Data":"3c2a483662a22d2466f77960d662a82ad053f1f5442f5f3d8255e03ba9ca55fa"} Jan 26 16:56:58 crc kubenswrapper[4865]: I0126 16:56:58.357088 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:56:58 crc kubenswrapper[4865]: I0126 16:56:58.357115 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:56:58 crc kubenswrapper[4865]: I0126 16:56:58.357258 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:56:58 crc kubenswrapper[4865]: I0126 16:56:58.357263 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:56:58 crc kubenswrapper[4865]: E0126 16:56:58.357418 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 16:56:58 crc kubenswrapper[4865]: E0126 16:56:58.357675 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-wx7wp" podUID="27ebe888-610a-47c4-b256-3ddbf03f83b9" Jan 26 16:56:58 crc kubenswrapper[4865]: E0126 16:56:58.357836 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 16:56:58 crc kubenswrapper[4865]: E0126 16:56:58.357977 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 16:57:00 crc kubenswrapper[4865]: I0126 16:57:00.357504 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:57:00 crc kubenswrapper[4865]: I0126 16:57:00.357597 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:57:00 crc kubenswrapper[4865]: I0126 16:57:00.357546 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:57:00 crc kubenswrapper[4865]: I0126 16:57:00.358769 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:57:00 crc kubenswrapper[4865]: I0126 16:57:00.361183 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Jan 26 16:57:00 crc kubenswrapper[4865]: I0126 16:57:00.361534 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Jan 26 16:57:00 crc kubenswrapper[4865]: I0126 16:57:00.361629 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Jan 26 16:57:00 crc kubenswrapper[4865]: I0126 16:57:00.361814 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Jan 26 16:57:00 crc kubenswrapper[4865]: I0126 16:57:00.362147 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Jan 26 16:57:00 crc kubenswrapper[4865]: I0126 16:57:00.362412 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.528362 4865 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.577181 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-4bktv"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.577620 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-4bktv" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.582936 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-djm26"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.583237 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-gfbfj"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.583420 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-fl8hv"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.583643 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-fl8hv" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.583930 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-djm26" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.584232 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.584291 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.584805 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.584946 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.585095 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.585186 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.585376 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.585939 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6sxkt"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.586204 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6sxkt" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.593175 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.593393 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.593700 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-nknkn"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.598445 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-nknkn" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.598505 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.598901 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.599147 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.601211 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.604382 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.613680 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.613678 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.613943 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.614728 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.615132 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4b7rk"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.615688 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4b7rk" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.616178 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-npnhh"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.616745 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.617192 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6xswb"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.617603 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6xswb" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.618851 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.618870 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.618901 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.618968 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.618981 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.619043 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.619001 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.622380 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.622524 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.623112 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mdgf6"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.623595 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mdgf6" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.625926 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.626033 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.626177 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.626213 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.626300 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.627999 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.628703 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mjd22"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.629136 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-6lhmn"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.629591 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-6lhmn" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.629661 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-7pntj"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.630204 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-7pntj" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.630411 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mjd22" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.633516 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.635818 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.636019 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.637136 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.638041 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.638072 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.638118 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.638291 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.638548 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.638717 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.638902 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.639310 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.640489 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.641552 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.641886 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.642051 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.642622 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-mq42v"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.642657 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.642741 4865 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.642874 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.642986 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.643112 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mq42v" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.643123 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.643171 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.643328 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.643538 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.652086 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-tndns"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.662983 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-dsngx"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.667168 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-zkwx7"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.670668 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-tndns" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.688609 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-dsngx" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.688786 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.689115 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.689481 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.689550 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.689611 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.689678 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.690253 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.690942 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.691261 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.691302 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.691261 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.691665 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-mfq2t"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.691961 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.692162 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4bd100a4-d04b-44f7-83fb-3a94f0fe90ae-config\") pod \"machine-api-operator-5694c8668f-4bktv\" (UID: \"4bd100a4-d04b-44f7-83fb-3a94f0fe90ae\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-4bktv" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.692218 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6ac1ce1b-d6b5-41b4-9a6e-000553e6b5df-serving-cert\") pod \"authentication-operator-69f744f599-djm26\" (UID: \"6ac1ce1b-d6b5-41b4-9a6e-000553e6b5df\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-djm26" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.692245 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-npcbp\" 
(UniqueName: \"kubernetes.io/projected/4bd100a4-d04b-44f7-83fb-3a94f0fe90ae-kube-api-access-npcbp\") pod \"machine-api-operator-5694c8668f-4bktv\" (UID: \"4bd100a4-d04b-44f7-83fb-3a94f0fe90ae\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-4bktv" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.692268 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6599bcb-4a91-4fdd-9101-877df6425122-config\") pod \"openshift-apiserver-operator-796bbdcf4f-6sxkt\" (UID: \"e6599bcb-4a91-4fdd-9101-877df6425122\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6sxkt" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.692288 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ac1ce1b-d6b5-41b4-9a6e-000553e6b5df-config\") pod \"authentication-operator-69f744f599-djm26\" (UID: \"6ac1ce1b-d6b5-41b4-9a6e-000553e6b5df\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-djm26" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.692299 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-7kt64"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.692310 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6ac1ce1b-d6b5-41b4-9a6e-000553e6b5df-service-ca-bundle\") pod \"authentication-operator-69f744f599-djm26\" (UID: \"6ac1ce1b-d6b5-41b4-9a6e-000553e6b5df\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-djm26" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.692351 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6ac1ce1b-d6b5-41b4-9a6e-000553e6b5df-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-djm26\" (UID: \"6ac1ce1b-d6b5-41b4-9a6e-000553e6b5df\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-djm26" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.692377 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8g5d6\" (UniqueName: \"kubernetes.io/projected/6ac1ce1b-d6b5-41b4-9a6e-000553e6b5df-kube-api-access-8g5d6\") pod \"authentication-operator-69f744f599-djm26\" (UID: \"6ac1ce1b-d6b5-41b4-9a6e-000553e6b5df\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-djm26" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.692400 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/4bd100a4-d04b-44f7-83fb-3a94f0fe90ae-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-4bktv\" (UID: \"4bd100a4-d04b-44f7-83fb-3a94f0fe90ae\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-4bktv" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.692424 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e6599bcb-4a91-4fdd-9101-877df6425122-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-6sxkt\" (UID: \"e6599bcb-4a91-4fdd-9101-877df6425122\") " 
pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6sxkt" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.692472 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f647n\" (UniqueName: \"kubernetes.io/projected/e6599bcb-4a91-4fdd-9101-877df6425122-kube-api-access-f647n\") pod \"openshift-apiserver-operator-796bbdcf4f-6sxkt\" (UID: \"e6599bcb-4a91-4fdd-9101-877df6425122\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6sxkt" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.692500 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/4bd100a4-d04b-44f7-83fb-3a94f0fe90ae-images\") pod \"machine-api-operator-5694c8668f-4bktv\" (UID: \"4bd100a4-d04b-44f7-83fb-3a94f0fe90ae\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-4bktv" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.692799 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-shl5n"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.692801 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zkwx7" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.692853 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-mfq2t" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.693062 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7kt64" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.693574 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-shl5n" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.697126 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.698401 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.698582 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.699655 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.699840 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.699962 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.700084 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.700183 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.701728 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-gx9t9"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.703864 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.704086 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.705117 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-gx9t9" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.705444 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.705570 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.705746 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.706152 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.706476 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.706590 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.706957 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.709815 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.710049 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.710321 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.714333 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.715102 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.715201 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.715366 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.715828 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.715926 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.717450 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.717624 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.717787 4865 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-console"/"console-serving-cert" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.717949 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-68xvn"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.718572 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vzw8s"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.718965 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-vsrcj"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.719681 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mxn5w"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.719722 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-vsrcj" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.719973 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-68xvn" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.720177 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vzw8s" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.720289 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mxn5w" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.723958 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-z8s8l"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.725672 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-z8s8l" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.726307 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-l6hpd"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.726947 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-l6hpd" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.729691 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-5mz4s"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.730692 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-5mz4s" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.731789 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j2f55"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.732493 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j2f55" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.733141 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qd267"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.734194 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qd267" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.735630 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-g66fj"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.740887 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-g66fj" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.743041 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.751211 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.751632 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kvvch"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.752696 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-zbkcg"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.771214 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kvvch" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.774115 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.778027 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.778247 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.778404 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.778639 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.778890 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-b2bdh"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.779521 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29490765-lx6b6"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.779975 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-zbkcg" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.780024 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kw5g8"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.780179 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29490765-lx6b6" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.780269 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-b2bdh" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.781536 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.781659 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.781945 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-gzx5p"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.782099 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kw5g8" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.782345 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-phb7b"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.782548 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-gzx5p" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.783135 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-4bktv"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.783159 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6sxkt"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.787259 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-phb7b" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.789154 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-gfbfj"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.794043 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.794104 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3452d3f1-d080-412d-a1da-0dc0d3776d5f-config\") pod \"controller-manager-879f6c89f-nknkn\" (UID: \"3452d3f1-d080-412d-a1da-0dc0d3776d5f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-nknkn" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.794139 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6ac1ce1b-d6b5-41b4-9a6e-000553e6b5df-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-djm26\" (UID: \"6ac1ce1b-d6b5-41b4-9a6e-000553e6b5df\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-djm26" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.794160 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8g5d6\" (UniqueName: \"kubernetes.io/projected/6ac1ce1b-d6b5-41b4-9a6e-000553e6b5df-kube-api-access-8g5d6\") pod \"authentication-operator-69f744f599-djm26\" (UID: \"6ac1ce1b-d6b5-41b4-9a6e-000553e6b5df\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-djm26" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.794183 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/41f9cfdd-4a56-4e9a-ac7c-8b683e96d625-ca-trust-extracted\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.794288 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/4bd100a4-d04b-44f7-83fb-3a94f0fe90ae-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-4bktv\" (UID: \"4bd100a4-d04b-44f7-83fb-3a94f0fe90ae\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-4bktv" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.794344 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3452d3f1-d080-412d-a1da-0dc0d3776d5f-serving-cert\") pod \"controller-manager-879f6c89f-nknkn\" (UID: \"3452d3f1-d080-412d-a1da-0dc0d3776d5f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-nknkn" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.794382 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/e225c1fe-ceb6-47c2-9721-cfaf39515364-trusted-ca\") pod \"console-operator-58897d9998-fl8hv\" (UID: \"e225c1fe-ceb6-47c2-9721-cfaf39515364\") " pod="openshift-console-operator/console-operator-58897d9998-fl8hv" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.794412 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e6599bcb-4a91-4fdd-9101-877df6425122-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-6sxkt\" (UID: \"e6599bcb-4a91-4fdd-9101-877df6425122\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6sxkt" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.794442 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/41f9cfdd-4a56-4e9a-ac7c-8b683e96d625-registry-tls\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.794465 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e225c1fe-ceb6-47c2-9721-cfaf39515364-config\") pod \"console-operator-58897d9998-fl8hv\" (UID: \"e225c1fe-ceb6-47c2-9721-cfaf39515364\") " pod="openshift-console-operator/console-operator-58897d9998-fl8hv" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.794524 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f647n\" (UniqueName: \"kubernetes.io/projected/e6599bcb-4a91-4fdd-9101-877df6425122-kube-api-access-f647n\") pod \"openshift-apiserver-operator-796bbdcf4f-6sxkt\" (UID: \"e6599bcb-4a91-4fdd-9101-877df6425122\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6sxkt" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.794546 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/41f9cfdd-4a56-4e9a-ac7c-8b683e96d625-bound-sa-token\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.794573 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/41f9cfdd-4a56-4e9a-ac7c-8b683e96d625-installation-pull-secrets\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.794603 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/4bd100a4-d04b-44f7-83fb-3a94f0fe90ae-images\") pod \"machine-api-operator-5694c8668f-4bktv\" (UID: \"4bd100a4-d04b-44f7-83fb-3a94f0fe90ae\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-4bktv" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.794646 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4bd100a4-d04b-44f7-83fb-3a94f0fe90ae-config\") pod \"machine-api-operator-5694c8668f-4bktv\" (UID: 
\"4bd100a4-d04b-44f7-83fb-3a94f0fe90ae\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-4bktv" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.794671 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/41f9cfdd-4a56-4e9a-ac7c-8b683e96d625-registry-certificates\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.794699 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p8lww\" (UniqueName: \"kubernetes.io/projected/41f9cfdd-4a56-4e9a-ac7c-8b683e96d625-kube-api-access-p8lww\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.794719 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wcrms\" (UniqueName: \"kubernetes.io/projected/3452d3f1-d080-412d-a1da-0dc0d3776d5f-kube-api-access-wcrms\") pod \"controller-manager-879f6c89f-nknkn\" (UID: \"3452d3f1-d080-412d-a1da-0dc0d3776d5f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-nknkn" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.794749 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3452d3f1-d080-412d-a1da-0dc0d3776d5f-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-nknkn\" (UID: \"3452d3f1-d080-412d-a1da-0dc0d3776d5f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-nknkn" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.794790 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b8vrh\" (UniqueName: \"kubernetes.io/projected/e225c1fe-ceb6-47c2-9721-cfaf39515364-kube-api-access-b8vrh\") pod \"console-operator-58897d9998-fl8hv\" (UID: \"e225c1fe-ceb6-47c2-9721-cfaf39515364\") " pod="openshift-console-operator/console-operator-58897d9998-fl8hv" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.794823 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6ac1ce1b-d6b5-41b4-9a6e-000553e6b5df-serving-cert\") pod \"authentication-operator-69f744f599-djm26\" (UID: \"6ac1ce1b-d6b5-41b4-9a6e-000553e6b5df\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-djm26" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.794853 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-npcbp\" (UniqueName: \"kubernetes.io/projected/4bd100a4-d04b-44f7-83fb-3a94f0fe90ae-kube-api-access-npcbp\") pod \"machine-api-operator-5694c8668f-4bktv\" (UID: \"4bd100a4-d04b-44f7-83fb-3a94f0fe90ae\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-4bktv" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.794880 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/41f9cfdd-4a56-4e9a-ac7c-8b683e96d625-trusted-ca\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: 
\"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.794902 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3452d3f1-d080-412d-a1da-0dc0d3776d5f-client-ca\") pod \"controller-manager-879f6c89f-nknkn\" (UID: \"3452d3f1-d080-412d-a1da-0dc0d3776d5f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-nknkn" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.794930 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6599bcb-4a91-4fdd-9101-877df6425122-config\") pod \"openshift-apiserver-operator-796bbdcf4f-6sxkt\" (UID: \"e6599bcb-4a91-4fdd-9101-877df6425122\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6sxkt" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.794951 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ac1ce1b-d6b5-41b4-9a6e-000553e6b5df-config\") pod \"authentication-operator-69f744f599-djm26\" (UID: \"6ac1ce1b-d6b5-41b4-9a6e-000553e6b5df\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-djm26" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.794973 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6ac1ce1b-d6b5-41b4-9a6e-000553e6b5df-service-ca-bundle\") pod \"authentication-operator-69f744f599-djm26\" (UID: \"6ac1ce1b-d6b5-41b4-9a6e-000553e6b5df\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-djm26" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.795010 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e225c1fe-ceb6-47c2-9721-cfaf39515364-serving-cert\") pod \"console-operator-58897d9998-fl8hv\" (UID: \"e225c1fe-ceb6-47c2-9721-cfaf39515364\") " pod="openshift-console-operator/console-operator-58897d9998-fl8hv" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.796537 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4bd100a4-d04b-44f7-83fb-3a94f0fe90ae-config\") pod \"machine-api-operator-5694c8668f-4bktv\" (UID: \"4bd100a4-d04b-44f7-83fb-3a94f0fe90ae\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-4bktv" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.797016 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4b7rk"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.799934 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ac1ce1b-d6b5-41b4-9a6e-000553e6b5df-config\") pod \"authentication-operator-69f744f599-djm26\" (UID: \"6ac1ce1b-d6b5-41b4-9a6e-000553e6b5df\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-djm26" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.800338 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/4bd100a4-d04b-44f7-83fb-3a94f0fe90ae-images\") pod \"machine-api-operator-5694c8668f-4bktv\" (UID: 
\"4bd100a4-d04b-44f7-83fb-3a94f0fe90ae\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-4bktv" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.800673 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6599bcb-4a91-4fdd-9101-877df6425122-config\") pod \"openshift-apiserver-operator-796bbdcf4f-6sxkt\" (UID: \"e6599bcb-4a91-4fdd-9101-877df6425122\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6sxkt" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.800810 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6ac1ce1b-d6b5-41b4-9a6e-000553e6b5df-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-djm26\" (UID: \"6ac1ce1b-d6b5-41b4-9a6e-000553e6b5df\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-djm26" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.801356 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6ac1ce1b-d6b5-41b4-9a6e-000553e6b5df-service-ca-bundle\") pod \"authentication-operator-69f744f599-djm26\" (UID: \"6ac1ce1b-d6b5-41b4-9a6e-000553e6b5df\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-djm26" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.801452 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-npnhh"] Jan 26 16:57:01 crc kubenswrapper[4865]: E0126 16:57:01.801729 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:02.301703917 +0000 UTC m=+149.885589504 (durationBeforeRetry 500ms). 
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.803981 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mdgf6"]
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.808628 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.809278 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-vsrcj"]
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.815359 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-6lhmn"]
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.815434 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-djm26"]
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.815451 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-nknkn"]
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.821485 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mjd22"]
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.821548 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-68xvn"]
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.821562 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-dsngx"]
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.826391 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-shl5n"]
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.826434 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6xswb"]
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.826447 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-c26dz"]
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.827165 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-c26dz"
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-c26dz" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.829710 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-tndns"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.829959 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6ac1ce1b-d6b5-41b4-9a6e-000553e6b5df-serving-cert\") pod \"authentication-operator-69f744f599-djm26\" (UID: \"6ac1ce1b-d6b5-41b4-9a6e-000553e6b5df\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-djm26" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.830623 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/4bd100a4-d04b-44f7-83fb-3a94f0fe90ae-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-4bktv\" (UID: \"4bd100a4-d04b-44f7-83fb-3a94f0fe90ae\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-4bktv" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.831567 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-hn6rt"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.831911 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e6599bcb-4a91-4fdd-9101-877df6425122-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-6sxkt\" (UID: \"e6599bcb-4a91-4fdd-9101-877df6425122\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6sxkt" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.832348 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-fl8hv"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.832425 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-hn6rt" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.833512 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-zkwx7"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.834227 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.836419 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-l6hpd"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.842311 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-7pntj"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.845898 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29490765-lx6b6"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.847541 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-z8s8l"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.848975 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-mfq2t"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.851177 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vzw8s"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.852351 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.852660 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-7kt64"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.853866 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mxn5w"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.855200 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-zbkcg"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.856686 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j2f55"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.858142 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-g66fj"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.859251 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.859523 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-5mz4s"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.861234 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qd267"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.862379 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-b2bdh"] Jan 26 
16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.863577 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kvvch"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.865444 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-gtdlb"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.873895 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-phb7b"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.874146 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-gtdlb" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.876038 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-hn6rt"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.886641 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.886921 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kw5g8"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.889772 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-gtdlb"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.893359 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-gzx5p"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.895076 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-wx8d6"] Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.897603 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:57:01 crc kubenswrapper[4865]: E0126 16:57:01.897742 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:02.397726462 +0000 UTC m=+149.981612049 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.897865 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ce5bf85d-dd65-4174-8fdc-10988986d665-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-phb7b\" (UID: \"ce5bf85d-dd65-4174-8fdc-10988986d665\") " pod="openshift-marketplace/marketplace-operator-79b997595-phb7b" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.897886 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8b7ls\" (UniqueName: \"kubernetes.io/projected/9c1dc714-6c88-4db6-ac20-54b9771956a3-kube-api-access-8b7ls\") pod \"cluster-samples-operator-665b6dd947-4b7rk\" (UID: \"9c1dc714-6c88-4db6-ac20-54b9771956a3\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4b7rk" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.897901 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-npnhh\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.897916 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-676w5\" (UniqueName: \"kubernetes.io/projected/dcb838a3-bd74-4860-a1e7-c9bacf4334ca-kube-api-access-676w5\") pod \"cluster-image-registry-operator-dc59b4c8b-mjd22\" (UID: \"dcb838a3-bd74-4860-a1e7-c9bacf4334ca\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mjd22" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.897935 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/41f9cfdd-4a56-4e9a-ac7c-8b683e96d625-installation-pull-secrets\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.897953 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-npnhh\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.897968 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/08899e06-b9e4-4fda-b49b-879f8789ea40-serving-cert\") pod \"etcd-operator-b45778765-68xvn\" (UID: \"08899e06-b9e4-4fda-b49b-879f8789ea40\") 
" pod="openshift-etcd-operator/etcd-operator-b45778765-68xvn" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.898008 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-djb9j\" (UniqueName: \"kubernetes.io/projected/2918b19b-b96c-4b24-988d-f486d9cee307-kube-api-access-djb9j\") pod \"package-server-manager-789f6589d5-j2f55\" (UID: \"2918b19b-b96c-4b24-988d-f486d9cee307\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j2f55" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.898384 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/5418254d-28ed-4c8e-b0d5-70ac0fe883ee-images\") pod \"machine-config-operator-74547568cd-shl5n\" (UID: \"5418254d-28ed-4c8e-b0d5-70ac0fe883ee\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-shl5n" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.898427 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7cd346f7-8914-4089-8e15-89085f8340d2-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-qd267\" (UID: \"7cd346f7-8914-4089-8e15-89085f8340d2\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qd267" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.898448 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/dcb838a3-bd74-4860-a1e7-c9bacf4334ca-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-mjd22\" (UID: \"dcb838a3-bd74-4860-a1e7-c9bacf4334ca\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mjd22" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.898470 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8731191c-a1c3-4422-bcac-e6e6cfec649f-config\") pod \"route-controller-manager-6576b87f9c-6xswb\" (UID: \"8731191c-a1c3-4422-bcac-e6e6cfec649f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6xswb" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.898500 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/092200be-8013-4057-b586-d321c27dc3fb-apiservice-cert\") pod \"packageserver-d55dfcdfc-l6hpd\" (UID: \"092200be-8013-4057-b586-d321c27dc3fb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-l6hpd" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.898527 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ltnmr\" (UniqueName: \"kubernetes.io/projected/3862707b-bcae-4ad0-b309-fa075dc82f70-kube-api-access-ltnmr\") pod \"collect-profiles-29490765-lx6b6\" (UID: \"3862707b-bcae-4ad0-b309-fa075dc82f70\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490765-lx6b6" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.898554 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/41f9cfdd-4a56-4e9a-ac7c-8b683e96d625-registry-certificates\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: 
\"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.898576 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b7cf8ff2-589a-4f1a-aa4a-0afe351bdd3b-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-vzw8s\" (UID: \"b7cf8ff2-589a-4f1a-aa4a-0afe351bdd3b\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vzw8s" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.898610 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fllt7\" (UniqueName: \"kubernetes.io/projected/c62c3a20-356b-487a-80b8-aaea5650712b-kube-api-access-fllt7\") pod \"apiserver-7bbb656c7d-zkwx7\" (UID: \"c62c3a20-356b-487a-80b8-aaea5650712b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zkwx7" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.898639 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wcrms\" (UniqueName: \"kubernetes.io/projected/3452d3f1-d080-412d-a1da-0dc0d3776d5f-kube-api-access-wcrms\") pod \"controller-manager-879f6c89f-nknkn\" (UID: \"3452d3f1-d080-412d-a1da-0dc0d3776d5f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-nknkn" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.898661 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/08899e06-b9e4-4fda-b49b-879f8789ea40-etcd-client\") pod \"etcd-operator-b45778765-68xvn\" (UID: \"08899e06-b9e4-4fda-b49b-879f8789ea40\") " pod="openshift-etcd-operator/etcd-operator-b45778765-68xvn" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.898691 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8ae57544-4ad2-46e2-b205-3a770abfc47f-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-mxn5w\" (UID: \"8ae57544-4ad2-46e2-b205-3a770abfc47f\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mxn5w" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.898717 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/dcb838a3-bd74-4860-a1e7-c9bacf4334ca-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-mjd22\" (UID: \"dcb838a3-bd74-4860-a1e7-c9bacf4334ca\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mjd22" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.898739 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/d63ddf25-4c00-4aec-b51a-5c80fe2a5810-machine-approver-tls\") pod \"machine-approver-56656f9798-mq42v\" (UID: \"d63ddf25-4c00-4aec-b51a-5c80fe2a5810\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mq42v" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.898763 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9r5n\" (UniqueName: 
\"kubernetes.io/projected/08899e06-b9e4-4fda-b49b-879f8789ea40-kube-api-access-w9r5n\") pod \"etcd-operator-b45778765-68xvn\" (UID: \"08899e06-b9e4-4fda-b49b-879f8789ea40\") " pod="openshift-etcd-operator/etcd-operator-b45778765-68xvn" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.898781 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a5eb1e8d-ffa8-4422-aa5d-852074436139-service-ca-bundle\") pod \"router-default-5444994796-gx9t9\" (UID: \"a5eb1e8d-ffa8-4422-aa5d-852074436139\") " pod="openshift-ingress/router-default-5444994796-gx9t9" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.898796 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/c62c3a20-356b-487a-80b8-aaea5650712b-etcd-client\") pod \"apiserver-7bbb656c7d-zkwx7\" (UID: \"c62c3a20-356b-487a-80b8-aaea5650712b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zkwx7" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.898813 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kcd2q\" (UniqueName: \"kubernetes.io/projected/96e731f1-c61c-4bb3-9a87-d0d2a85ec1e4-kube-api-access-kcd2q\") pod \"service-ca-operator-777779d784-zbkcg\" (UID: \"96e731f1-c61c-4bb3-9a87-d0d2a85ec1e4\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-zbkcg" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.898830 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c9k2g\" (UniqueName: \"kubernetes.io/projected/c9a81e77-6ca5-431f-96a8-2de21b7a214c-kube-api-access-c9k2g\") pod \"kube-storage-version-migrator-operator-b67b599dd-b2bdh\" (UID: \"c9a81e77-6ca5-431f-96a8-2de21b7a214c\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-b2bdh" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.898845 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-npnhh\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.898859 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/dcb838a3-bd74-4860-a1e7-c9bacf4334ca-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-mjd22\" (UID: \"dcb838a3-bd74-4860-a1e7-c9bacf4334ca\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mjd22" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.898873 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/d6c003d6-66db-4a16-86a2-ba0d03cf1a25-etcd-client\") pod \"apiserver-76f77b778f-6lhmn\" (UID: \"d6c003d6-66db-4a16-86a2-ba0d03cf1a25\") " pod="openshift-apiserver/apiserver-76f77b778f-6lhmn" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.898890 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" 
(UniqueName: \"kubernetes.io/configmap/d6c003d6-66db-4a16-86a2-ba0d03cf1a25-etcd-serving-ca\") pod \"apiserver-76f77b778f-6lhmn\" (UID: \"d6c003d6-66db-4a16-86a2-ba0d03cf1a25\") " pod="openshift-apiserver/apiserver-76f77b778f-6lhmn" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.898907 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mjjjb\" (UniqueName: \"kubernetes.io/projected/55e71027-c397-4e49-adef-6f8d1c7d78d2-kube-api-access-mjjjb\") pod \"machine-config-controller-84d6567774-z8s8l\" (UID: \"55e71027-c397-4e49-adef-6f8d1c7d78d2\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-z8s8l" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.898921 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/ce5bf85d-dd65-4174-8fdc-10988986d665-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-phb7b\" (UID: \"ce5bf85d-dd65-4174-8fdc-10988986d665\") " pod="openshift-marketplace/marketplace-operator-79b997595-phb7b" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.898935 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8731191c-a1c3-4422-bcac-e6e6cfec649f-client-ca\") pod \"route-controller-manager-6576b87f9c-6xswb\" (UID: \"8731191c-a1c3-4422-bcac-e6e6cfec649f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6xswb" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.898959 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d3d19ca3-d0e7-4ef8-9112-c66a479404d2-metrics-tls\") pod \"dns-operator-744455d44c-mfq2t\" (UID: \"d3d19ca3-d0e7-4ef8-9112-c66a479404d2\") " pod="openshift-dns-operator/dns-operator-744455d44c-mfq2t" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.900575 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.900169 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c62c3a20-356b-487a-80b8-aaea5650712b-serving-cert\") pod \"apiserver-7bbb656c7d-zkwx7\" (UID: \"c62c3a20-356b-487a-80b8-aaea5650712b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zkwx7" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.901575 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/41f9cfdd-4a56-4e9a-ac7c-8b683e96d625-trusted-ca\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.901699 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3452d3f1-d080-412d-a1da-0dc0d3776d5f-client-ca\") pod \"controller-manager-879f6c89f-nknkn\" (UID: \"3452d3f1-d080-412d-a1da-0dc0d3776d5f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-nknkn" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.901856 4865 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-96p5c\" (UniqueName: \"kubernetes.io/projected/f8fac562-f66c-433b-9e4a-1a08fe6d78f5-kube-api-access-96p5c\") pod \"console-f9d7485db-dsngx\" (UID: \"f8fac562-f66c-433b-9e4a-1a08fe6d78f5\") " pod="openshift-console/console-f9d7485db-dsngx" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.902033 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/2918b19b-b96c-4b24-988d-f486d9cee307-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-j2f55\" (UID: \"2918b19b-b96c-4b24-988d-f486d9cee307\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j2f55" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.902131 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/c45cd232-7b33-4314-8060-842c5b6ccb6c-srv-cert\") pod \"olm-operator-6b444d44fb-g66fj\" (UID: \"c45cd232-7b33-4314-8060-842c5b6ccb6c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-g66fj" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.902237 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/723c5d82-43eb-4833-b1d3-ad5236350fcc-available-featuregates\") pod \"openshift-config-operator-7777fb866f-tndns\" (UID: \"723c5d82-43eb-4833-b1d3-ad5236350fcc\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-tndns" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.903067 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e225c1fe-ceb6-47c2-9721-cfaf39515364-serving-cert\") pod \"console-operator-58897d9998-fl8hv\" (UID: \"e225c1fe-ceb6-47c2-9721-cfaf39515364\") " pod="openshift-console-operator/console-operator-58897d9998-fl8hv" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.903162 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9fc43392-6eaa-4e4c-bffb-42bdbc556d4e-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-mdgf6\" (UID: \"9fc43392-6eaa-4e4c-bffb-42bdbc556d4e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mdgf6" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.903249 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/5418254d-28ed-4c8e-b0d5-70ac0fe883ee-proxy-tls\") pod \"machine-config-operator-74547568cd-shl5n\" (UID: \"5418254d-28ed-4c8e-b0d5-70ac0fe883ee\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-shl5n" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.903329 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d6c003d6-66db-4a16-86a2-ba0d03cf1a25-trusted-ca-bundle\") pod \"apiserver-76f77b778f-6lhmn\" (UID: \"d6c003d6-66db-4a16-86a2-ba0d03cf1a25\") " pod="openshift-apiserver/apiserver-76f77b778f-6lhmn" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.903410 4865 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.903485 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3452d3f1-d080-412d-a1da-0dc0d3776d5f-config\") pod \"controller-manager-879f6c89f-nknkn\" (UID: \"3452d3f1-d080-412d-a1da-0dc0d3776d5f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-nknkn" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.903571 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wkrz7\" (UniqueName: \"kubernetes.io/projected/9156ce7a-0206-4509-b090-d2e93c83f425-kube-api-access-wkrz7\") pod \"downloads-7954f5f757-7pntj\" (UID: \"9156ce7a-0206-4509-b090-d2e93c83f425\") " pod="openshift-console/downloads-7954f5f757-7pntj" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.903851 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kf27h\" (UniqueName: \"kubernetes.io/projected/5418254d-28ed-4c8e-b0d5-70ac0fe883ee-kube-api-access-kf27h\") pod \"machine-config-operator-74547568cd-shl5n\" (UID: \"5418254d-28ed-4c8e-b0d5-70ac0fe883ee\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-shl5n" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.902251 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/41f9cfdd-4a56-4e9a-ac7c-8b683e96d625-registry-certificates\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.902947 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/41f9cfdd-4a56-4e9a-ac7c-8b683e96d625-trusted-ca\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.903980 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bjxrx\" (UniqueName: \"kubernetes.io/projected/d3d19ca3-d0e7-4ef8-9112-c66a479404d2-kube-api-access-bjxrx\") pod \"dns-operator-744455d44c-mfq2t\" (UID: \"d3d19ca3-d0e7-4ef8-9112-c66a479404d2\") " pod="openshift-dns-operator/dns-operator-744455d44c-mfq2t" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.904106 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d6c003d6-66db-4a16-86a2-ba0d03cf1a25-serving-cert\") pod \"apiserver-76f77b778f-6lhmn\" (UID: \"d6c003d6-66db-4a16-86a2-ba0d03cf1a25\") " pod="openshift-apiserver/apiserver-76f77b778f-6lhmn" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.904132 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/3452d3f1-d080-412d-a1da-0dc0d3776d5f-serving-cert\") pod \"controller-manager-879f6c89f-nknkn\" (UID: \"3452d3f1-d080-412d-a1da-0dc0d3776d5f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-nknkn" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.904160 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/a5eb1e8d-ffa8-4422-aa5d-852074436139-default-certificate\") pod \"router-default-5444994796-gx9t9\" (UID: \"a5eb1e8d-ffa8-4422-aa5d-852074436139\") " pod="openshift-ingress/router-default-5444994796-gx9t9" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.904195 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b7cf8ff2-589a-4f1a-aa4a-0afe351bdd3b-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-vzw8s\" (UID: \"b7cf8ff2-589a-4f1a-aa4a-0afe351bdd3b\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vzw8s" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.904222 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d63ddf25-4c00-4aec-b51a-5c80fe2a5810-auth-proxy-config\") pod \"machine-approver-56656f9798-mq42v\" (UID: \"d63ddf25-4c00-4aec-b51a-5c80fe2a5810\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mq42v" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.904246 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/f8fac562-f66c-433b-9e4a-1a08fe6d78f5-console-oauth-config\") pod \"console-f9d7485db-dsngx\" (UID: \"f8fac562-f66c-433b-9e4a-1a08fe6d78f5\") " pod="openshift-console/console-f9d7485db-dsngx" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.904267 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/5418254d-28ed-4c8e-b0d5-70ac0fe883ee-auth-proxy-config\") pod \"machine-config-operator-74547568cd-shl5n\" (UID: \"5418254d-28ed-4c8e-b0d5-70ac0fe883ee\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-shl5n" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.904309 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/08899e06-b9e4-4fda-b49b-879f8789ea40-etcd-service-ca\") pod \"etcd-operator-b45778765-68xvn\" (UID: \"08899e06-b9e4-4fda-b49b-879f8789ea40\") " pod="openshift-etcd-operator/etcd-operator-b45778765-68xvn" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.904347 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/82bc8cef-2bec-4d01-a1a1-fd9c19c830a2-bound-sa-token\") pod \"ingress-operator-5b745b69d9-7kt64\" (UID: \"82bc8cef-2bec-4d01-a1a1-fd9c19c830a2\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7kt64" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.904387 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/d6c003d6-66db-4a16-86a2-ba0d03cf1a25-config\") pod \"apiserver-76f77b778f-6lhmn\" (UID: \"d6c003d6-66db-4a16-86a2-ba0d03cf1a25\") " pod="openshift-apiserver/apiserver-76f77b778f-6lhmn" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.904448 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/723c5d82-43eb-4833-b1d3-ad5236350fcc-serving-cert\") pod \"openshift-config-operator-7777fb866f-tndns\" (UID: \"723c5d82-43eb-4833-b1d3-ad5236350fcc\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-tndns" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.904478 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/0af9b5a1-9cca-41dc-9a23-823a440db8c1-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-kvvch\" (UID: \"0af9b5a1-9cca-41dc-9a23-823a440db8c1\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kvvch" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.904505 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/41f9cfdd-4a56-4e9a-ac7c-8b683e96d625-bound-sa-token\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.904530 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c62c3a20-356b-487a-80b8-aaea5650712b-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-zkwx7\" (UID: \"c62c3a20-356b-487a-80b8-aaea5650712b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zkwx7" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.904557 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9fc43392-6eaa-4e4c-bffb-42bdbc556d4e-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-mdgf6\" (UID: \"9fc43392-6eaa-4e4c-bffb-42bdbc556d4e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mdgf6" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.904580 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c9a81e77-6ca5-431f-96a8-2de21b7a214c-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-b2bdh\" (UID: \"c9a81e77-6ca5-431f-96a8-2de21b7a214c\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-b2bdh" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.904604 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/55e71027-c397-4e49-adef-6f8d1c7d78d2-proxy-tls\") pod \"machine-config-controller-84d6567774-z8s8l\" (UID: \"55e71027-c397-4e49-adef-6f8d1c7d78d2\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-z8s8l" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.904636 4865 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5c5r\" (UniqueName: \"kubernetes.io/projected/ce5bf85d-dd65-4174-8fdc-10988986d665-kube-api-access-l5c5r\") pod \"marketplace-operator-79b997595-phb7b\" (UID: \"ce5bf85d-dd65-4174-8fdc-10988986d665\") " pod="openshift-marketplace/marketplace-operator-79b997595-phb7b" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.904657 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/1a1bd8ec-d313-45c0-9859-880fe45c4342-certs\") pod \"machine-config-server-c26dz\" (UID: \"1a1bd8ec-d313-45c0-9859-880fe45c4342\") " pod="openshift-machine-config-operator/machine-config-server-c26dz" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.904774 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f8fac562-f66c-433b-9e4a-1a08fe6d78f5-service-ca\") pod \"console-f9d7485db-dsngx\" (UID: \"f8fac562-f66c-433b-9e4a-1a08fe6d78f5\") " pod="openshift-console/console-f9d7485db-dsngx" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.904855 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8731191c-a1c3-4422-bcac-e6e6cfec649f-serving-cert\") pod \"route-controller-manager-6576b87f9c-6xswb\" (UID: \"8731191c-a1c3-4422-bcac-e6e6cfec649f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6xswb" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.904884 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/092200be-8013-4057-b586-d321c27dc3fb-tmpfs\") pod \"packageserver-d55dfcdfc-l6hpd\" (UID: \"092200be-8013-4057-b586-d321c27dc3fb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-l6hpd" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.904906 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c62c3a20-356b-487a-80b8-aaea5650712b-audit-dir\") pod \"apiserver-7bbb656c7d-zkwx7\" (UID: \"c62c3a20-356b-487a-80b8-aaea5650712b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zkwx7" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.904981 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c9a81e77-6ca5-431f-96a8-2de21b7a214c-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-b2bdh\" (UID: \"c9a81e77-6ca5-431f-96a8-2de21b7a214c\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-b2bdh" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.905021 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/9c1dc714-6c88-4db6-ac20-54b9771956a3-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-4b7rk\" (UID: \"9c1dc714-6c88-4db6-ac20-54b9771956a3\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4b7rk" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.905045 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/96e731f1-c61c-4bb3-9a87-d0d2a85ec1e4-serving-cert\") pod \"service-ca-operator-777779d784-zbkcg\" (UID: \"96e731f1-c61c-4bb3-9a87-d0d2a85ec1e4\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-zbkcg" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.905072 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p8lww\" (UniqueName: \"kubernetes.io/projected/41f9cfdd-4a56-4e9a-ac7c-8b683e96d625-kube-api-access-p8lww\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.905094 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7v476\" (UniqueName: \"kubernetes.io/projected/a5eb1e8d-ffa8-4422-aa5d-852074436139-kube-api-access-7v476\") pod \"router-default-5444994796-gx9t9\" (UID: \"a5eb1e8d-ffa8-4422-aa5d-852074436139\") " pod="openshift-ingress/router-default-5444994796-gx9t9" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.905715 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3862707b-bcae-4ad0-b309-fa075dc82f70-config-volume\") pod \"collect-profiles-29490765-lx6b6\" (UID: \"3862707b-bcae-4ad0-b309-fa075dc82f70\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490765-lx6b6" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.904629 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3452d3f1-d080-412d-a1da-0dc0d3776d5f-client-ca\") pod \"controller-manager-879f6c89f-nknkn\" (UID: \"3452d3f1-d080-412d-a1da-0dc0d3776d5f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-nknkn" Jan 26 16:57:01 crc kubenswrapper[4865]: E0126 16:57:01.905719 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:02.405705291 +0000 UTC m=+149.989590878 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.905960 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b727c\" (UniqueName: \"kubernetes.io/projected/0e850ae3-1746-4f97-9314-e6add8fb4fed-kube-api-access-b727c\") pod \"migrator-59844c95c7-5mz4s\" (UID: \"0e850ae3-1746-4f97-9314-e6add8fb4fed\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-5mz4s" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.905999 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-npnhh\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.906017 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-npnhh\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.906033 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d63ddf25-4c00-4aec-b51a-5c80fe2a5810-config\") pod \"machine-approver-56656f9798-mq42v\" (UID: \"d63ddf25-4c00-4aec-b51a-5c80fe2a5810\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mq42v" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.906050 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/d6c003d6-66db-4a16-86a2-ba0d03cf1a25-node-pullsecrets\") pod \"apiserver-76f77b778f-6lhmn\" (UID: \"d6c003d6-66db-4a16-86a2-ba0d03cf1a25\") " pod="openshift-apiserver/apiserver-76f77b778f-6lhmn" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.906068 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3452d3f1-d080-412d-a1da-0dc0d3776d5f-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-nknkn\" (UID: \"3452d3f1-d080-412d-a1da-0dc0d3776d5f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-nknkn" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.906086 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ae57544-4ad2-46e2-b205-3a770abfc47f-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-mxn5w\" (UID: \"8ae57544-4ad2-46e2-b205-3a770abfc47f\") " 
pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mxn5w" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.906103 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-npnhh\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.906126 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sghbt\" (UniqueName: \"kubernetes.io/projected/d63ddf25-4c00-4aec-b51a-5c80fe2a5810-kube-api-access-sghbt\") pod \"machine-approver-56656f9798-mq42v\" (UID: \"d63ddf25-4c00-4aec-b51a-5c80fe2a5810\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mq42v" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.906217 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/08899e06-b9e4-4fda-b49b-879f8789ea40-config\") pod \"etcd-operator-b45778765-68xvn\" (UID: \"08899e06-b9e4-4fda-b49b-879f8789ea40\") " pod="openshift-etcd-operator/etcd-operator-b45778765-68xvn" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.912355 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7cd346f7-8914-4089-8e15-89085f8340d2-config\") pod \"kube-apiserver-operator-766d6c64bb-qd267\" (UID: \"7cd346f7-8914-4089-8e15-89085f8340d2\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qd267" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.912653 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e3cf8ef1-8f1a-4688-8443-600830a81400-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-vsrcj\" (UID: \"e3cf8ef1-8f1a-4688-8443-600830a81400\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-vsrcj" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.913377 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/aafa6785-2823-4a2c-919d-b0276056337c-signing-cabundle\") pod \"service-ca-9c57cc56f-gzx5p\" (UID: \"aafa6785-2823-4a2c-919d-b0276056337c\") " pod="openshift-service-ca/service-ca-9c57cc56f-gzx5p" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.913522 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s7978\" (UniqueName: \"kubernetes.io/projected/d6c003d6-66db-4a16-86a2-ba0d03cf1a25-kube-api-access-s7978\") pod \"apiserver-76f77b778f-6lhmn\" (UID: \"d6c003d6-66db-4a16-86a2-ba0d03cf1a25\") " pod="openshift-apiserver/apiserver-76f77b778f-6lhmn" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.908313 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-wx8d6"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.910589 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e225c1fe-ceb6-47c2-9721-cfaf39515364-serving-cert\") pod \"console-operator-58897d9998-fl8hv\" (UID: \"e225c1fe-ceb6-47c2-9721-cfaf39515364\") " pod="openshift-console-operator/console-operator-58897d9998-fl8hv"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.911030 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3452d3f1-d080-412d-a1da-0dc0d3776d5f-serving-cert\") pod \"controller-manager-879f6c89f-nknkn\" (UID: \"3452d3f1-d080-412d-a1da-0dc0d3776d5f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-nknkn"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.907095 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/41f9cfdd-4a56-4e9a-ac7c-8b683e96d625-installation-pull-secrets\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.907272 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3452d3f1-d080-412d-a1da-0dc0d3776d5f-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-nknkn\" (UID: \"3452d3f1-d080-412d-a1da-0dc0d3776d5f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-nknkn"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.907936 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3452d3f1-d080-412d-a1da-0dc0d3776d5f-config\") pod \"controller-manager-879f6c89f-nknkn\" (UID: \"3452d3f1-d080-412d-a1da-0dc0d3776d5f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-nknkn"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.908243 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-wx8d6"]
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.913759 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7cd346f7-8914-4089-8e15-89085f8340d2-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-qd267\" (UID: \"7cd346f7-8914-4089-8e15-89085f8340d2\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qd267"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.915021 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f8fac562-f66c-433b-9e4a-1a08fe6d78f5-trusted-ca-bundle\") pod \"console-f9d7485db-dsngx\" (UID: \"f8fac562-f66c-433b-9e4a-1a08fe6d78f5\") " pod="openshift-console/console-f9d7485db-dsngx"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.915127 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/d6c003d6-66db-4a16-86a2-ba0d03cf1a25-encryption-config\") pod \"apiserver-76f77b778f-6lhmn\" (UID: \"d6c003d6-66db-4a16-86a2-ba0d03cf1a25\") " pod="openshift-apiserver/apiserver-76f77b778f-6lhmn"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.915229 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b8vrh\" (UniqueName: \"kubernetes.io/projected/e225c1fe-ceb6-47c2-9721-cfaf39515364-kube-api-access-b8vrh\") pod \"console-operator-58897d9998-fl8hv\" (UID: \"e225c1fe-ceb6-47c2-9721-cfaf39515364\") " pod="openshift-console-operator/console-operator-58897d9998-fl8hv"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.915332 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/44932794-d83d-4520-ab4c-21e5a7652506-srv-cert\") pod \"catalog-operator-68c6474976-kw5g8\" (UID: \"44932794-d83d-4520-ab4c-21e5a7652506\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kw5g8"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.915428 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/f8fac562-f66c-433b-9e4a-1a08fe6d78f5-oauth-serving-cert\") pod \"console-f9d7485db-dsngx\" (UID: \"f8fac562-f66c-433b-9e4a-1a08fe6d78f5\") " pod="openshift-console/console-f9d7485db-dsngx"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.915519 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-audit-dir\") pod \"oauth-openshift-558db77b4-npnhh\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " pod="openshift-authentication/oauth-openshift-558db77b4-npnhh"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.915674 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kmtnb\" (UniqueName: \"kubernetes.io/projected/e3cf8ef1-8f1a-4688-8443-600830a81400-kube-api-access-kmtnb\") pod \"multus-admission-controller-857f4d67dd-vsrcj\" (UID: \"e3cf8ef1-8f1a-4688-8443-600830a81400\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-vsrcj"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.915760 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/82bc8cef-2bec-4d01-a1a1-fd9c19c830a2-metrics-tls\") pod \"ingress-operator-5b745b69d9-7kt64\" (UID: \"82bc8cef-2bec-4d01-a1a1-fd9c19c830a2\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7kt64"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.915786 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/c62c3a20-356b-487a-80b8-aaea5650712b-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-zkwx7\" (UID: \"c62c3a20-356b-487a-80b8-aaea5650712b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zkwx7"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.915816 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8ae57544-4ad2-46e2-b205-3a770abfc47f-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-mxn5w\" (UID: \"8ae57544-4ad2-46e2-b205-3a770abfc47f\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mxn5w"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.915836 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c62c3a20-356b-487a-80b8-aaea5650712b-audit-policies\") pod \"apiserver-7bbb656c7d-zkwx7\" (UID: \"c62c3a20-356b-487a-80b8-aaea5650712b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zkwx7"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.915864 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-59d2n\" (UniqueName: \"kubernetes.io/projected/0af9b5a1-9cca-41dc-9a23-823a440db8c1-kube-api-access-59d2n\") pod \"control-plane-machine-set-operator-78cbb6b69f-kvvch\" (UID: \"0af9b5a1-9cca-41dc-9a23-823a440db8c1\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kvvch"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.915891 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d6c003d6-66db-4a16-86a2-ba0d03cf1a25-audit-dir\") pod \"apiserver-76f77b778f-6lhmn\" (UID: \"d6c003d6-66db-4a16-86a2-ba0d03cf1a25\") " pod="openshift-apiserver/apiserver-76f77b778f-6lhmn"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.915929 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/a5eb1e8d-ffa8-4422-aa5d-852074436139-stats-auth\") pod \"router-default-5444994796-gx9t9\" (UID: \"a5eb1e8d-ffa8-4422-aa5d-852074436139\") " pod="openshift-ingress/router-default-5444994796-gx9t9"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.915950 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/f8fac562-f66c-433b-9e4a-1a08fe6d78f5-console-config\") pod \"console-f9d7485db-dsngx\" (UID: \"f8fac562-f66c-433b-9e4a-1a08fe6d78f5\") " pod="openshift-console/console-f9d7485db-dsngx"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.915972 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/1a1bd8ec-d313-45c0-9859-880fe45c4342-node-bootstrap-token\") pod \"machine-config-server-c26dz\" (UID: \"1a1bd8ec-d313-45c0-9859-880fe45c4342\") " pod="openshift-machine-config-operator/machine-config-server-c26dz"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.916025 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-smgv8\" (UniqueName: \"kubernetes.io/projected/1a1bd8ec-d313-45c0-9859-880fe45c4342-kube-api-access-smgv8\") pod \"machine-config-server-c26dz\" (UID: \"1a1bd8ec-d313-45c0-9859-880fe45c4342\") " pod="openshift-machine-config-operator/machine-config-server-c26dz"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.916043 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8g5v2\" (UniqueName: \"kubernetes.io/projected/9fc43392-6eaa-4e4c-bffb-42bdbc556d4e-kube-api-access-8g5v2\") pod \"openshift-controller-manager-operator-756b6f6bc6-mdgf6\" (UID: \"9fc43392-6eaa-4e4c-bffb-42bdbc556d4e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mdgf6"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.916061 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a5eb1e8d-ffa8-4422-aa5d-852074436139-metrics-certs\") pod \"router-default-5444994796-gx9t9\" (UID: \"a5eb1e8d-ffa8-4422-aa5d-852074436139\") " pod="openshift-ingress/router-default-5444994796-gx9t9"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.916076 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-audit-policies\") pod \"oauth-openshift-558db77b4-npnhh\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " pod="openshift-authentication/oauth-openshift-558db77b4-npnhh"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.916094 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6pqjw\" (UniqueName: \"kubernetes.io/projected/c45cd232-7b33-4314-8060-842c5b6ccb6c-kube-api-access-6pqjw\") pod \"olm-operator-6b444d44fb-g66fj\" (UID: \"c45cd232-7b33-4314-8060-842c5b6ccb6c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-g66fj"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.916111 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-npnhh\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " pod="openshift-authentication/oauth-openshift-558db77b4-npnhh"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.916127 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7vdfv\" (UniqueName: \"kubernetes.io/projected/aafa6785-2823-4a2c-919d-b0276056337c-kube-api-access-7vdfv\") pod \"service-ca-9c57cc56f-gzx5p\" (UID: \"aafa6785-2823-4a2c-919d-b0276056337c\") " pod="openshift-service-ca/service-ca-9c57cc56f-gzx5p"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.916142 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/d6c003d6-66db-4a16-86a2-ba0d03cf1a25-audit\") pod \"apiserver-76f77b778f-6lhmn\" (UID: \"d6c003d6-66db-4a16-86a2-ba0d03cf1a25\") " pod="openshift-apiserver/apiserver-76f77b778f-6lhmn"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.916162 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/092200be-8013-4057-b586-d321c27dc3fb-webhook-cert\") pod \"packageserver-d55dfcdfc-l6hpd\" (UID: \"092200be-8013-4057-b586-d321c27dc3fb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-l6hpd"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.916222 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/55e71027-c397-4e49-adef-6f8d1c7d78d2-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-z8s8l\" (UID: \"55e71027-c397-4e49-adef-6f8d1c7d78d2\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-z8s8l"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.916273 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3862707b-bcae-4ad0-b309-fa075dc82f70-secret-volume\") pod \"collect-profiles-29490765-lx6b6\" (UID: \"3862707b-bcae-4ad0-b309-fa075dc82f70\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490765-lx6b6"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.916321 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/41f9cfdd-4a56-4e9a-ac7c-8b683e96d625-ca-trust-extracted\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.916346 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/f8fac562-f66c-433b-9e4a-1a08fe6d78f5-console-serving-cert\") pod \"console-f9d7485db-dsngx\" (UID: \"f8fac562-f66c-433b-9e4a-1a08fe6d78f5\") " pod="openshift-console/console-f9d7485db-dsngx"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.916371 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6pq68\" (UniqueName: \"kubernetes.io/projected/8731191c-a1c3-4422-bcac-e6e6cfec649f-kube-api-access-6pq68\") pod \"route-controller-manager-6576b87f9c-6xswb\" (UID: \"8731191c-a1c3-4422-bcac-e6e6cfec649f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6xswb"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.916402 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e225c1fe-ceb6-47c2-9721-cfaf39515364-trusted-ca\") pod \"console-operator-58897d9998-fl8hv\" (UID: \"e225c1fe-ceb6-47c2-9721-cfaf39515364\") " pod="openshift-console-operator/console-operator-58897d9998-fl8hv"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.916425 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6xg97\" (UniqueName: \"kubernetes.io/projected/82bc8cef-2bec-4d01-a1a1-fd9c19c830a2-kube-api-access-6xg97\") pod \"ingress-operator-5b745b69d9-7kt64\" (UID: \"82bc8cef-2bec-4d01-a1a1-fd9c19c830a2\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7kt64"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.916471 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b7cf8ff2-589a-4f1a-aa4a-0afe351bdd3b-config\") pod \"kube-controller-manager-operator-78b949d7b-vzw8s\" (UID: \"b7cf8ff2-589a-4f1a-aa4a-0afe351bdd3b\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vzw8s"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.916497 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/44932794-d83d-4520-ab4c-21e5a7652506-profile-collector-cert\") pod \"catalog-operator-68c6474976-kw5g8\" (UID: \"44932794-d83d-4520-ab4c-21e5a7652506\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kw5g8"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.916530 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-npnhh\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " pod="openshift-authentication/oauth-openshift-558db77b4-npnhh"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.916548 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/96e731f1-c61c-4bb3-9a87-d0d2a85ec1e4-config\") pod \"service-ca-operator-777779d784-zbkcg\" (UID: \"96e731f1-c61c-4bb3-9a87-d0d2a85ec1e4\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-zbkcg"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.916586 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/c45cd232-7b33-4314-8060-842c5b6ccb6c-profile-collector-cert\") pod \"olm-operator-6b444d44fb-g66fj\" (UID: \"c45cd232-7b33-4314-8060-842c5b6ccb6c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-g66fj"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.917090 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/41f9cfdd-4a56-4e9a-ac7c-8b683e96d625-registry-tls\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.917111 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e225c1fe-ceb6-47c2-9721-cfaf39515364-config\") pod \"console-operator-58897d9998-fl8hv\" (UID: \"e225c1fe-ceb6-47c2-9721-cfaf39515364\") " pod="openshift-console-operator/console-operator-58897d9998-fl8hv"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.917133 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-npnhh\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " pod="openshift-authentication/oauth-openshift-558db77b4-npnhh"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.917161 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/d6c003d6-66db-4a16-86a2-ba0d03cf1a25-image-import-ca\") pod \"apiserver-76f77b778f-6lhmn\" (UID: \"d6c003d6-66db-4a16-86a2-ba0d03cf1a25\") " pod="openshift-apiserver/apiserver-76f77b778f-6lhmn"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.917185 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/82bc8cef-2bec-4d01-a1a1-fd9c19c830a2-trusted-ca\") pod \"ingress-operator-5b745b69d9-7kt64\" (UID: \"82bc8cef-2bec-4d01-a1a1-fd9c19c830a2\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7kt64"
Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.917206 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/aafa6785-2823-4a2c-919d-b0276056337c-signing-key\") pod \"service-ca-9c57cc56f-gzx5p\" (UID: \"aafa6785-2823-4a2c-919d-b0276056337c\") " pod="openshift-service-ca/service-ca-9c57cc56f-gzx5p"
\"kubernetes.io/secret/aafa6785-2823-4a2c-919d-b0276056337c-signing-key\") pod \"service-ca-9c57cc56f-gzx5p\" (UID: \"aafa6785-2823-4a2c-919d-b0276056337c\") " pod="openshift-service-ca/service-ca-9c57cc56f-gzx5p" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.917247 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/c62c3a20-356b-487a-80b8-aaea5650712b-encryption-config\") pod \"apiserver-7bbb656c7d-zkwx7\" (UID: \"c62c3a20-356b-487a-80b8-aaea5650712b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zkwx7" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.917265 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/08899e06-b9e4-4fda-b49b-879f8789ea40-etcd-ca\") pod \"etcd-operator-b45778765-68xvn\" (UID: \"08899e06-b9e4-4fda-b49b-879f8789ea40\") " pod="openshift-etcd-operator/etcd-operator-b45778765-68xvn" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.917282 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-npnhh\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.917298 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lskq5\" (UniqueName: \"kubernetes.io/projected/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-kube-api-access-lskq5\") pod \"oauth-openshift-558db77b4-npnhh\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.917342 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bj4m8\" (UniqueName: \"kubernetes.io/projected/44932794-d83d-4520-ab4c-21e5a7652506-kube-api-access-bj4m8\") pod \"catalog-operator-68c6474976-kw5g8\" (UID: \"44932794-d83d-4520-ab4c-21e5a7652506\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kw5g8" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.917359 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-klz22\" (UniqueName: \"kubernetes.io/projected/723c5d82-43eb-4833-b1d3-ad5236350fcc-kube-api-access-klz22\") pod \"openshift-config-operator-7777fb866f-tndns\" (UID: \"723c5d82-43eb-4833-b1d3-ad5236350fcc\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-tndns" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.917375 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dkvc6\" (UniqueName: \"kubernetes.io/projected/092200be-8013-4057-b586-d321c27dc3fb-kube-api-access-dkvc6\") pod \"packageserver-d55dfcdfc-l6hpd\" (UID: \"092200be-8013-4057-b586-d321c27dc3fb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-l6hpd" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.917391 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-npnhh\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.917470 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/41f9cfdd-4a56-4e9a-ac7c-8b683e96d625-ca-trust-extracted\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.918097 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e225c1fe-ceb6-47c2-9721-cfaf39515364-config\") pod \"console-operator-58897d9998-fl8hv\" (UID: \"e225c1fe-ceb6-47c2-9721-cfaf39515364\") " pod="openshift-console-operator/console-operator-58897d9998-fl8hv" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.918126 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e225c1fe-ceb6-47c2-9721-cfaf39515364-trusted-ca\") pod \"console-operator-58897d9998-fl8hv\" (UID: \"e225c1fe-ceb6-47c2-9721-cfaf39515364\") " pod="openshift-console-operator/console-operator-58897d9998-fl8hv" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.919848 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.922331 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/41f9cfdd-4a56-4e9a-ac7c-8b683e96d625-registry-tls\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.941078 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.968270 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Jan 26 16:57:01 crc kubenswrapper[4865]: I0126 16:57:01.980545 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.000195 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.017877 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:57:02 crc kubenswrapper[4865]: E0126 16:57:02.018030 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-01-26 16:57:02.518002523 +0000 UTC m=+150.101888110 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.018131 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3862707b-bcae-4ad0-b309-fa075dc82f70-secret-volume\") pod \"collect-profiles-29490765-lx6b6\" (UID: \"3862707b-bcae-4ad0-b309-fa075dc82f70\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490765-lx6b6" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.018168 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/f8fac562-f66c-433b-9e4a-1a08fe6d78f5-console-serving-cert\") pod \"console-f9d7485db-dsngx\" (UID: \"f8fac562-f66c-433b-9e4a-1a08fe6d78f5\") " pod="openshift-console/console-f9d7485db-dsngx" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.018198 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6pq68\" (UniqueName: \"kubernetes.io/projected/8731191c-a1c3-4422-bcac-e6e6cfec649f-kube-api-access-6pq68\") pod \"route-controller-manager-6576b87f9c-6xswb\" (UID: \"8731191c-a1c3-4422-bcac-e6e6cfec649f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6xswb" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.018224 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6xg97\" (UniqueName: \"kubernetes.io/projected/82bc8cef-2bec-4d01-a1a1-fd9c19c830a2-kube-api-access-6xg97\") pod \"ingress-operator-5b745b69d9-7kt64\" (UID: \"82bc8cef-2bec-4d01-a1a1-fd9c19c830a2\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7kt64" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.018251 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b7cf8ff2-589a-4f1a-aa4a-0afe351bdd3b-config\") pod \"kube-controller-manager-operator-78b949d7b-vzw8s\" (UID: \"b7cf8ff2-589a-4f1a-aa4a-0afe351bdd3b\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vzw8s" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.018275 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/44932794-d83d-4520-ab4c-21e5a7652506-profile-collector-cert\") pod \"catalog-operator-68c6474976-kw5g8\" (UID: \"44932794-d83d-4520-ab4c-21e5a7652506\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kw5g8" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.018296 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-npnhh\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.018351 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/96e731f1-c61c-4bb3-9a87-d0d2a85ec1e4-config\") pod \"service-ca-operator-777779d784-zbkcg\" (UID: \"96e731f1-c61c-4bb3-9a87-d0d2a85ec1e4\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-zbkcg" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.018408 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/c45cd232-7b33-4314-8060-842c5b6ccb6c-profile-collector-cert\") pod \"olm-operator-6b444d44fb-g66fj\" (UID: \"c45cd232-7b33-4314-8060-842c5b6ccb6c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-g66fj" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.018441 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-npnhh\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.018493 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/d6c003d6-66db-4a16-86a2-ba0d03cf1a25-image-import-ca\") pod \"apiserver-76f77b778f-6lhmn\" (UID: \"d6c003d6-66db-4a16-86a2-ba0d03cf1a25\") " pod="openshift-apiserver/apiserver-76f77b778f-6lhmn" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.018517 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/82bc8cef-2bec-4d01-a1a1-fd9c19c830a2-trusted-ca\") pod \"ingress-operator-5b745b69d9-7kt64\" (UID: \"82bc8cef-2bec-4d01-a1a1-fd9c19c830a2\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7kt64" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.018540 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/aafa6785-2823-4a2c-919d-b0276056337c-signing-key\") pod \"service-ca-9c57cc56f-gzx5p\" (UID: \"aafa6785-2823-4a2c-919d-b0276056337c\") " pod="openshift-service-ca/service-ca-9c57cc56f-gzx5p" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.018564 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/c62c3a20-356b-487a-80b8-aaea5650712b-encryption-config\") pod \"apiserver-7bbb656c7d-zkwx7\" (UID: \"c62c3a20-356b-487a-80b8-aaea5650712b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zkwx7" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.018724 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/08899e06-b9e4-4fda-b49b-879f8789ea40-etcd-ca\") pod \"etcd-operator-b45778765-68xvn\" (UID: \"08899e06-b9e4-4fda-b49b-879f8789ea40\") " pod="openshift-etcd-operator/etcd-operator-b45778765-68xvn" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.018754 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: 
\"kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-npnhh\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.018791 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lskq5\" (UniqueName: \"kubernetes.io/projected/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-kube-api-access-lskq5\") pod \"oauth-openshift-558db77b4-npnhh\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.018835 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bj4m8\" (UniqueName: \"kubernetes.io/projected/44932794-d83d-4520-ab4c-21e5a7652506-kube-api-access-bj4m8\") pod \"catalog-operator-68c6474976-kw5g8\" (UID: \"44932794-d83d-4520-ab4c-21e5a7652506\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kw5g8" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.018871 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-klz22\" (UniqueName: \"kubernetes.io/projected/723c5d82-43eb-4833-b1d3-ad5236350fcc-kube-api-access-klz22\") pod \"openshift-config-operator-7777fb866f-tndns\" (UID: \"723c5d82-43eb-4833-b1d3-ad5236350fcc\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-tndns" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.018908 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dkvc6\" (UniqueName: \"kubernetes.io/projected/092200be-8013-4057-b586-d321c27dc3fb-kube-api-access-dkvc6\") pod \"packageserver-d55dfcdfc-l6hpd\" (UID: \"092200be-8013-4057-b586-d321c27dc3fb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-l6hpd" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.018945 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-npnhh\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.019005 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ce5bf85d-dd65-4174-8fdc-10988986d665-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-phb7b\" (UID: \"ce5bf85d-dd65-4174-8fdc-10988986d665\") " pod="openshift-marketplace/marketplace-operator-79b997595-phb7b" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.019046 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8b7ls\" (UniqueName: \"kubernetes.io/projected/9c1dc714-6c88-4db6-ac20-54b9771956a3-kube-api-access-8b7ls\") pod \"cluster-samples-operator-665b6dd947-4b7rk\" (UID: \"9c1dc714-6c88-4db6-ac20-54b9771956a3\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4b7rk" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.019083 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-npnhh\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.019114 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-676w5\" (UniqueName: \"kubernetes.io/projected/dcb838a3-bd74-4860-a1e7-c9bacf4334ca-kube-api-access-676w5\") pod \"cluster-image-registry-operator-dc59b4c8b-mjd22\" (UID: \"dcb838a3-bd74-4860-a1e7-c9bacf4334ca\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mjd22" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.019149 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-npnhh\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.019176 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/08899e06-b9e4-4fda-b49b-879f8789ea40-serving-cert\") pod \"etcd-operator-b45778765-68xvn\" (UID: \"08899e06-b9e4-4fda-b49b-879f8789ea40\") " pod="openshift-etcd-operator/etcd-operator-b45778765-68xvn" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.019216 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-djb9j\" (UniqueName: \"kubernetes.io/projected/2918b19b-b96c-4b24-988d-f486d9cee307-kube-api-access-djb9j\") pod \"package-server-manager-789f6589d5-j2f55\" (UID: \"2918b19b-b96c-4b24-988d-f486d9cee307\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j2f55" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.019853 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/d6c003d6-66db-4a16-86a2-ba0d03cf1a25-image-import-ca\") pod \"apiserver-76f77b778f-6lhmn\" (UID: \"d6c003d6-66db-4a16-86a2-ba0d03cf1a25\") " pod="openshift-apiserver/apiserver-76f77b778f-6lhmn" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.020465 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-npnhh\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.020841 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.021142 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/82bc8cef-2bec-4d01-a1a1-fd9c19c830a2-trusted-ca\") pod \"ingress-operator-5b745b69d9-7kt64\" (UID: \"82bc8cef-2bec-4d01-a1a1-fd9c19c830a2\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7kt64" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.021563 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-npnhh\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.021593 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-npnhh\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.021634 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/5418254d-28ed-4c8e-b0d5-70ac0fe883ee-images\") pod \"machine-config-operator-74547568cd-shl5n\" (UID: \"5418254d-28ed-4c8e-b0d5-70ac0fe883ee\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-shl5n" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.021657 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7cd346f7-8914-4089-8e15-89085f8340d2-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-qd267\" (UID: \"7cd346f7-8914-4089-8e15-89085f8340d2\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qd267" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.021673 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/dcb838a3-bd74-4860-a1e7-c9bacf4334ca-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-mjd22\" (UID: \"dcb838a3-bd74-4860-a1e7-c9bacf4334ca\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mjd22" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.021690 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8731191c-a1c3-4422-bcac-e6e6cfec649f-config\") pod \"route-controller-manager-6576b87f9c-6xswb\" (UID: \"8731191c-a1c3-4422-bcac-e6e6cfec649f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6xswb" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.021707 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/092200be-8013-4057-b586-d321c27dc3fb-apiservice-cert\") pod \"packageserver-d55dfcdfc-l6hpd\" (UID: \"092200be-8013-4057-b586-d321c27dc3fb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-l6hpd" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.021724 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ltnmr\" (UniqueName: \"kubernetes.io/projected/3862707b-bcae-4ad0-b309-fa075dc82f70-kube-api-access-ltnmr\") pod \"collect-profiles-29490765-lx6b6\" (UID: \"3862707b-bcae-4ad0-b309-fa075dc82f70\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490765-lx6b6" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.021742 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/b7cf8ff2-589a-4f1a-aa4a-0afe351bdd3b-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-vzw8s\" (UID: \"b7cf8ff2-589a-4f1a-aa4a-0afe351bdd3b\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vzw8s" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.021771 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fllt7\" (UniqueName: \"kubernetes.io/projected/c62c3a20-356b-487a-80b8-aaea5650712b-kube-api-access-fllt7\") pod \"apiserver-7bbb656c7d-zkwx7\" (UID: \"c62c3a20-356b-487a-80b8-aaea5650712b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zkwx7" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.021792 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/40010721-d20f-43ea-966f-5a4a41d60a20-config-volume\") pod \"dns-default-gtdlb\" (UID: \"40010721-d20f-43ea-966f-5a4a41d60a20\") " pod="openshift-dns/dns-default-gtdlb" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.021816 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/08899e06-b9e4-4fda-b49b-879f8789ea40-etcd-client\") pod \"etcd-operator-b45778765-68xvn\" (UID: \"08899e06-b9e4-4fda-b49b-879f8789ea40\") " pod="openshift-etcd-operator/etcd-operator-b45778765-68xvn" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.021838 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8ae57544-4ad2-46e2-b205-3a770abfc47f-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-mxn5w\" (UID: \"8ae57544-4ad2-46e2-b205-3a770abfc47f\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mxn5w" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.021859 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/dcb838a3-bd74-4860-a1e7-c9bacf4334ca-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-mjd22\" (UID: \"dcb838a3-bd74-4860-a1e7-c9bacf4334ca\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mjd22" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.021875 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/d63ddf25-4c00-4aec-b51a-5c80fe2a5810-machine-approver-tls\") pod \"machine-approver-56656f9798-mq42v\" (UID: \"d63ddf25-4c00-4aec-b51a-5c80fe2a5810\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mq42v" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.021893 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9r5n\" (UniqueName: \"kubernetes.io/projected/08899e06-b9e4-4fda-b49b-879f8789ea40-kube-api-access-w9r5n\") pod \"etcd-operator-b45778765-68xvn\" (UID: \"08899e06-b9e4-4fda-b49b-879f8789ea40\") " pod="openshift-etcd-operator/etcd-operator-b45778765-68xvn" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.021909 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a5eb1e8d-ffa8-4422-aa5d-852074436139-service-ca-bundle\") pod 
\"router-default-5444994796-gx9t9\" (UID: \"a5eb1e8d-ffa8-4422-aa5d-852074436139\") " pod="openshift-ingress/router-default-5444994796-gx9t9" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.021925 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/c62c3a20-356b-487a-80b8-aaea5650712b-etcd-client\") pod \"apiserver-7bbb656c7d-zkwx7\" (UID: \"c62c3a20-356b-487a-80b8-aaea5650712b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zkwx7" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.021951 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kcd2q\" (UniqueName: \"kubernetes.io/projected/96e731f1-c61c-4bb3-9a87-d0d2a85ec1e4-kube-api-access-kcd2q\") pod \"service-ca-operator-777779d784-zbkcg\" (UID: \"96e731f1-c61c-4bb3-9a87-d0d2a85ec1e4\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-zbkcg" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.021983 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c9k2g\" (UniqueName: \"kubernetes.io/projected/c9a81e77-6ca5-431f-96a8-2de21b7a214c-kube-api-access-c9k2g\") pod \"kube-storage-version-migrator-operator-b67b599dd-b2bdh\" (UID: \"c9a81e77-6ca5-431f-96a8-2de21b7a214c\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-b2bdh" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.022019 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-npnhh\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.022037 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/dcb838a3-bd74-4860-a1e7-c9bacf4334ca-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-mjd22\" (UID: \"dcb838a3-bd74-4860-a1e7-c9bacf4334ca\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mjd22" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.022076 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/d6c003d6-66db-4a16-86a2-ba0d03cf1a25-etcd-client\") pod \"apiserver-76f77b778f-6lhmn\" (UID: \"d6c003d6-66db-4a16-86a2-ba0d03cf1a25\") " pod="openshift-apiserver/apiserver-76f77b778f-6lhmn" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.022115 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/d6c003d6-66db-4a16-86a2-ba0d03cf1a25-etcd-serving-ca\") pod \"apiserver-76f77b778f-6lhmn\" (UID: \"d6c003d6-66db-4a16-86a2-ba0d03cf1a25\") " pod="openshift-apiserver/apiserver-76f77b778f-6lhmn" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.022155 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mjjjb\" (UniqueName: \"kubernetes.io/projected/55e71027-c397-4e49-adef-6f8d1c7d78d2-kube-api-access-mjjjb\") pod \"machine-config-controller-84d6567774-z8s8l\" (UID: \"55e71027-c397-4e49-adef-6f8d1c7d78d2\") " 
pod="openshift-machine-config-operator/machine-config-controller-84d6567774-z8s8l" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.022178 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/ce5bf85d-dd65-4174-8fdc-10988986d665-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-phb7b\" (UID: \"ce5bf85d-dd65-4174-8fdc-10988986d665\") " pod="openshift-marketplace/marketplace-operator-79b997595-phb7b" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.022205 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8731191c-a1c3-4422-bcac-e6e6cfec649f-client-ca\") pod \"route-controller-manager-6576b87f9c-6xswb\" (UID: \"8731191c-a1c3-4422-bcac-e6e6cfec649f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6xswb" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.022234 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d3d19ca3-d0e7-4ef8-9112-c66a479404d2-metrics-tls\") pod \"dns-operator-744455d44c-mfq2t\" (UID: \"d3d19ca3-d0e7-4ef8-9112-c66a479404d2\") " pod="openshift-dns-operator/dns-operator-744455d44c-mfq2t" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.022276 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c62c3a20-356b-487a-80b8-aaea5650712b-serving-cert\") pod \"apiserver-7bbb656c7d-zkwx7\" (UID: \"c62c3a20-356b-487a-80b8-aaea5650712b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zkwx7" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.022305 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-96p5c\" (UniqueName: \"kubernetes.io/projected/f8fac562-f66c-433b-9e4a-1a08fe6d78f5-kube-api-access-96p5c\") pod \"console-f9d7485db-dsngx\" (UID: \"f8fac562-f66c-433b-9e4a-1a08fe6d78f5\") " pod="openshift-console/console-f9d7485db-dsngx" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.022335 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/2918b19b-b96c-4b24-988d-f486d9cee307-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-j2f55\" (UID: \"2918b19b-b96c-4b24-988d-f486d9cee307\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j2f55" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.022358 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/c45cd232-7b33-4314-8060-842c5b6ccb6c-srv-cert\") pod \"olm-operator-6b444d44fb-g66fj\" (UID: \"c45cd232-7b33-4314-8060-842c5b6ccb6c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-g66fj" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.022388 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/723c5d82-43eb-4833-b1d3-ad5236350fcc-available-featuregates\") pod \"openshift-config-operator-7777fb866f-tndns\" (UID: \"723c5d82-43eb-4833-b1d3-ad5236350fcc\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-tndns" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.022421 4865 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9fc43392-6eaa-4e4c-bffb-42bdbc556d4e-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-mdgf6\" (UID: \"9fc43392-6eaa-4e4c-bffb-42bdbc556d4e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mdgf6" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.022446 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/5418254d-28ed-4c8e-b0d5-70ac0fe883ee-proxy-tls\") pod \"machine-config-operator-74547568cd-shl5n\" (UID: \"5418254d-28ed-4c8e-b0d5-70ac0fe883ee\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-shl5n" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.022475 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/e8e3ebba-3ac5-4cee-8fa8-273a1f25eaf2-registration-dir\") pod \"csi-hostpathplugin-wx8d6\" (UID: \"e8e3ebba-3ac5-4cee-8fa8-273a1f25eaf2\") " pod="hostpath-provisioner/csi-hostpathplugin-wx8d6" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.022498 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d6c003d6-66db-4a16-86a2-ba0d03cf1a25-trusted-ca-bundle\") pod \"apiserver-76f77b778f-6lhmn\" (UID: \"d6c003d6-66db-4a16-86a2-ba0d03cf1a25\") " pod="openshift-apiserver/apiserver-76f77b778f-6lhmn" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.022551 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.022579 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/e8e3ebba-3ac5-4cee-8fa8-273a1f25eaf2-csi-data-dir\") pod \"csi-hostpathplugin-wx8d6\" (UID: \"e8e3ebba-3ac5-4cee-8fa8-273a1f25eaf2\") " pod="hostpath-provisioner/csi-hostpathplugin-wx8d6" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.024567 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wkrz7\" (UniqueName: \"kubernetes.io/projected/9156ce7a-0206-4509-b090-d2e93c83f425-kube-api-access-wkrz7\") pod \"downloads-7954f5f757-7pntj\" (UID: \"9156ce7a-0206-4509-b090-d2e93c83f425\") " pod="openshift-console/downloads-7954f5f757-7pntj" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.023503 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-npnhh\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.024589 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kf27h\" (UniqueName: 
\"kubernetes.io/projected/5418254d-28ed-4c8e-b0d5-70ac0fe883ee-kube-api-access-kf27h\") pod \"machine-config-operator-74547568cd-shl5n\" (UID: \"5418254d-28ed-4c8e-b0d5-70ac0fe883ee\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-shl5n" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.024616 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/e8e3ebba-3ac5-4cee-8fa8-273a1f25eaf2-socket-dir\") pod \"csi-hostpathplugin-wx8d6\" (UID: \"e8e3ebba-3ac5-4cee-8fa8-273a1f25eaf2\") " pod="hostpath-provisioner/csi-hostpathplugin-wx8d6" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.023690 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8731191c-a1c3-4422-bcac-e6e6cfec649f-config\") pod \"route-controller-manager-6576b87f9c-6xswb\" (UID: \"8731191c-a1c3-4422-bcac-e6e6cfec649f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6xswb" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.023869 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-npnhh\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.024661 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bjxrx\" (UniqueName: \"kubernetes.io/projected/d3d19ca3-d0e7-4ef8-9112-c66a479404d2-kube-api-access-bjxrx\") pod \"dns-operator-744455d44c-mfq2t\" (UID: \"d3d19ca3-d0e7-4ef8-9112-c66a479404d2\") " pod="openshift-dns-operator/dns-operator-744455d44c-mfq2t" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.024070 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-npnhh\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.024690 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d6c003d6-66db-4a16-86a2-ba0d03cf1a25-serving-cert\") pod \"apiserver-76f77b778f-6lhmn\" (UID: \"d6c003d6-66db-4a16-86a2-ba0d03cf1a25\") " pod="openshift-apiserver/apiserver-76f77b778f-6lhmn" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.024344 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/723c5d82-43eb-4833-b1d3-ad5236350fcc-available-featuregates\") pod \"openshift-config-operator-7777fb866f-tndns\" (UID: \"723c5d82-43eb-4833-b1d3-ad5236350fcc\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-tndns" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.024719 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/a5eb1e8d-ffa8-4422-aa5d-852074436139-default-certificate\") pod \"router-default-5444994796-gx9t9\" (UID: \"a5eb1e8d-ffa8-4422-aa5d-852074436139\") " 
pod="openshift-ingress/router-default-5444994796-gx9t9" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.024742 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b7cf8ff2-589a-4f1a-aa4a-0afe351bdd3b-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-vzw8s\" (UID: \"b7cf8ff2-589a-4f1a-aa4a-0afe351bdd3b\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vzw8s" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.024764 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d63ddf25-4c00-4aec-b51a-5c80fe2a5810-auth-proxy-config\") pod \"machine-approver-56656f9798-mq42v\" (UID: \"d63ddf25-4c00-4aec-b51a-5c80fe2a5810\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mq42v" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.024791 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/f8fac562-f66c-433b-9e4a-1a08fe6d78f5-console-oauth-config\") pod \"console-f9d7485db-dsngx\" (UID: \"f8fac562-f66c-433b-9e4a-1a08fe6d78f5\") " pod="openshift-console/console-f9d7485db-dsngx" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.024813 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/5418254d-28ed-4c8e-b0d5-70ac0fe883ee-auth-proxy-config\") pod \"machine-config-operator-74547568cd-shl5n\" (UID: \"5418254d-28ed-4c8e-b0d5-70ac0fe883ee\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-shl5n" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.024847 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/08899e06-b9e4-4fda-b49b-879f8789ea40-etcd-service-ca\") pod \"etcd-operator-b45778765-68xvn\" (UID: \"08899e06-b9e4-4fda-b49b-879f8789ea40\") " pod="openshift-etcd-operator/etcd-operator-b45778765-68xvn" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.023621 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-npnhh\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.024867 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/82bc8cef-2bec-4d01-a1a1-fd9c19c830a2-bound-sa-token\") pod \"ingress-operator-5b745b69d9-7kt64\" (UID: \"82bc8cef-2bec-4d01-a1a1-fd9c19c830a2\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7kt64" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.024895 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6c003d6-66db-4a16-86a2-ba0d03cf1a25-config\") pod \"apiserver-76f77b778f-6lhmn\" (UID: \"d6c003d6-66db-4a16-86a2-ba0d03cf1a25\") " pod="openshift-apiserver/apiserver-76f77b778f-6lhmn" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.024964 4865 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/723c5d82-43eb-4833-b1d3-ad5236350fcc-serving-cert\") pod \"openshift-config-operator-7777fb866f-tndns\" (UID: \"723c5d82-43eb-4833-b1d3-ad5236350fcc\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-tndns" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.025008 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/0af9b5a1-9cca-41dc-9a23-823a440db8c1-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-kvvch\" (UID: \"0af9b5a1-9cca-41dc-9a23-823a440db8c1\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kvvch" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.023735 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/c62c3a20-356b-487a-80b8-aaea5650712b-encryption-config\") pod \"apiserver-7bbb656c7d-zkwx7\" (UID: \"c62c3a20-356b-487a-80b8-aaea5650712b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zkwx7" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.025040 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c62c3a20-356b-487a-80b8-aaea5650712b-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-zkwx7\" (UID: \"c62c3a20-356b-487a-80b8-aaea5650712b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zkwx7" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.025122 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/e8e3ebba-3ac5-4cee-8fa8-273a1f25eaf2-plugins-dir\") pod \"csi-hostpathplugin-wx8d6\" (UID: \"e8e3ebba-3ac5-4cee-8fa8-273a1f25eaf2\") " pod="hostpath-provisioner/csi-hostpathplugin-wx8d6" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.025183 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9fc43392-6eaa-4e4c-bffb-42bdbc556d4e-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-mdgf6\" (UID: \"9fc43392-6eaa-4e4c-bffb-42bdbc556d4e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mdgf6" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.025227 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c9a81e77-6ca5-431f-96a8-2de21b7a214c-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-b2bdh\" (UID: \"c9a81e77-6ca5-431f-96a8-2de21b7a214c\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-b2bdh" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.025265 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/55e71027-c397-4e49-adef-6f8d1c7d78d2-proxy-tls\") pod \"machine-config-controller-84d6567774-z8s8l\" (UID: \"55e71027-c397-4e49-adef-6f8d1c7d78d2\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-z8s8l" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.025310 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5c5r\" (UniqueName: 
\"kubernetes.io/projected/ce5bf85d-dd65-4174-8fdc-10988986d665-kube-api-access-l5c5r\") pod \"marketplace-operator-79b997595-phb7b\" (UID: \"ce5bf85d-dd65-4174-8fdc-10988986d665\") " pod="openshift-marketplace/marketplace-operator-79b997595-phb7b" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.025344 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/1a1bd8ec-d313-45c0-9859-880fe45c4342-certs\") pod \"machine-config-server-c26dz\" (UID: \"1a1bd8ec-d313-45c0-9859-880fe45c4342\") " pod="openshift-machine-config-operator/machine-config-server-c26dz" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.025385 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f8fac562-f66c-433b-9e4a-1a08fe6d78f5-service-ca\") pod \"console-f9d7485db-dsngx\" (UID: \"f8fac562-f66c-433b-9e4a-1a08fe6d78f5\") " pod="openshift-console/console-f9d7485db-dsngx" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.025420 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8731191c-a1c3-4422-bcac-e6e6cfec649f-serving-cert\") pod \"route-controller-manager-6576b87f9c-6xswb\" (UID: \"8731191c-a1c3-4422-bcac-e6e6cfec649f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6xswb" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.025454 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/092200be-8013-4057-b586-d321c27dc3fb-tmpfs\") pod \"packageserver-d55dfcdfc-l6hpd\" (UID: \"092200be-8013-4057-b586-d321c27dc3fb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-l6hpd" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.025515 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c62c3a20-356b-487a-80b8-aaea5650712b-audit-dir\") pod \"apiserver-7bbb656c7d-zkwx7\" (UID: \"c62c3a20-356b-487a-80b8-aaea5650712b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zkwx7" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.025555 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/477ea589-1909-4668-ab2a-585589a4c8b6-cert\") pod \"ingress-canary-hn6rt\" (UID: \"477ea589-1909-4668-ab2a-585589a4c8b6\") " pod="openshift-ingress-canary/ingress-canary-hn6rt" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.025614 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c9a81e77-6ca5-431f-96a8-2de21b7a214c-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-b2bdh\" (UID: \"c9a81e77-6ca5-431f-96a8-2de21b7a214c\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-b2bdh" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.025641 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/9c1dc714-6c88-4db6-ac20-54b9771956a3-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-4b7rk\" (UID: \"9c1dc714-6c88-4db6-ac20-54b9771956a3\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4b7rk" Jan 26 
16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.025673 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/96e731f1-c61c-4bb3-9a87-d0d2a85ec1e4-serving-cert\") pod \"service-ca-operator-777779d784-zbkcg\" (UID: \"96e731f1-c61c-4bb3-9a87-d0d2a85ec1e4\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-zbkcg" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.025708 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7v476\" (UniqueName: \"kubernetes.io/projected/a5eb1e8d-ffa8-4422-aa5d-852074436139-kube-api-access-7v476\") pod \"router-default-5444994796-gx9t9\" (UID: \"a5eb1e8d-ffa8-4422-aa5d-852074436139\") " pod="openshift-ingress/router-default-5444994796-gx9t9" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.025733 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3862707b-bcae-4ad0-b309-fa075dc82f70-config-volume\") pod \"collect-profiles-29490765-lx6b6\" (UID: \"3862707b-bcae-4ad0-b309-fa075dc82f70\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490765-lx6b6" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.025756 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/40010721-d20f-43ea-966f-5a4a41d60a20-metrics-tls\") pod \"dns-default-gtdlb\" (UID: \"40010721-d20f-43ea-966f-5a4a41d60a20\") " pod="openshift-dns/dns-default-gtdlb" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.025780 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l9xd9\" (UniqueName: \"kubernetes.io/projected/e8e3ebba-3ac5-4cee-8fa8-273a1f25eaf2-kube-api-access-l9xd9\") pod \"csi-hostpathplugin-wx8d6\" (UID: \"e8e3ebba-3ac5-4cee-8fa8-273a1f25eaf2\") " pod="hostpath-provisioner/csi-hostpathplugin-wx8d6" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.025808 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b727c\" (UniqueName: \"kubernetes.io/projected/0e850ae3-1746-4f97-9314-e6add8fb4fed-kube-api-access-b727c\") pod \"migrator-59844c95c7-5mz4s\" (UID: \"0e850ae3-1746-4f97-9314-e6add8fb4fed\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-5mz4s" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.025846 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-npnhh\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.025873 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-npnhh\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.025899 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/d63ddf25-4c00-4aec-b51a-5c80fe2a5810-config\") pod \"machine-approver-56656f9798-mq42v\" (UID: \"d63ddf25-4c00-4aec-b51a-5c80fe2a5810\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mq42v" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.025926 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/d6c003d6-66db-4a16-86a2-ba0d03cf1a25-node-pullsecrets\") pod \"apiserver-76f77b778f-6lhmn\" (UID: \"d6c003d6-66db-4a16-86a2-ba0d03cf1a25\") " pod="openshift-apiserver/apiserver-76f77b778f-6lhmn" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.026051 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ae57544-4ad2-46e2-b205-3a770abfc47f-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-mxn5w\" (UID: \"8ae57544-4ad2-46e2-b205-3a770abfc47f\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mxn5w" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.026114 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-npnhh\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.026147 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sghbt\" (UniqueName: \"kubernetes.io/projected/d63ddf25-4c00-4aec-b51a-5c80fe2a5810-kube-api-access-sghbt\") pod \"machine-approver-56656f9798-mq42v\" (UID: \"d63ddf25-4c00-4aec-b51a-5c80fe2a5810\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mq42v" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.026216 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/08899e06-b9e4-4fda-b49b-879f8789ea40-config\") pod \"etcd-operator-b45778765-68xvn\" (UID: \"08899e06-b9e4-4fda-b49b-879f8789ea40\") " pod="openshift-etcd-operator/etcd-operator-b45778765-68xvn" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.026240 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7cd346f7-8914-4089-8e15-89085f8340d2-config\") pod \"kube-apiserver-operator-766d6c64bb-qd267\" (UID: \"7cd346f7-8914-4089-8e15-89085f8340d2\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qd267" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.026330 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e3cf8ef1-8f1a-4688-8443-600830a81400-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-vsrcj\" (UID: \"e3cf8ef1-8f1a-4688-8443-600830a81400\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-vsrcj" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.026383 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/aafa6785-2823-4a2c-919d-b0276056337c-signing-cabundle\") pod \"service-ca-9c57cc56f-gzx5p\" (UID: \"aafa6785-2823-4a2c-919d-b0276056337c\") " 
pod="openshift-service-ca/service-ca-9c57cc56f-gzx5p" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.026417 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s7978\" (UniqueName: \"kubernetes.io/projected/d6c003d6-66db-4a16-86a2-ba0d03cf1a25-kube-api-access-s7978\") pod \"apiserver-76f77b778f-6lhmn\" (UID: \"d6c003d6-66db-4a16-86a2-ba0d03cf1a25\") " pod="openshift-apiserver/apiserver-76f77b778f-6lhmn" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.026444 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7cd346f7-8914-4089-8e15-89085f8340d2-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-qd267\" (UID: \"7cd346f7-8914-4089-8e15-89085f8340d2\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qd267" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.026467 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f8fac562-f66c-433b-9e4a-1a08fe6d78f5-trusted-ca-bundle\") pod \"console-f9d7485db-dsngx\" (UID: \"f8fac562-f66c-433b-9e4a-1a08fe6d78f5\") " pod="openshift-console/console-f9d7485db-dsngx" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.026493 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/d6c003d6-66db-4a16-86a2-ba0d03cf1a25-encryption-config\") pod \"apiserver-76f77b778f-6lhmn\" (UID: \"d6c003d6-66db-4a16-86a2-ba0d03cf1a25\") " pod="openshift-apiserver/apiserver-76f77b778f-6lhmn" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.026527 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/44932794-d83d-4520-ab4c-21e5a7652506-srv-cert\") pod \"catalog-operator-68c6474976-kw5g8\" (UID: \"44932794-d83d-4520-ab4c-21e5a7652506\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kw5g8" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.026550 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/e8e3ebba-3ac5-4cee-8fa8-273a1f25eaf2-mountpoint-dir\") pod \"csi-hostpathplugin-wx8d6\" (UID: \"e8e3ebba-3ac5-4cee-8fa8-273a1f25eaf2\") " pod="hostpath-provisioner/csi-hostpathplugin-wx8d6" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.026574 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/f8fac562-f66c-433b-9e4a-1a08fe6d78f5-oauth-serving-cert\") pod \"console-f9d7485db-dsngx\" (UID: \"f8fac562-f66c-433b-9e4a-1a08fe6d78f5\") " pod="openshift-console/console-f9d7485db-dsngx" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.026603 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-audit-dir\") pod \"oauth-openshift-558db77b4-npnhh\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.026629 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kmtnb\" (UniqueName: 
\"kubernetes.io/projected/e3cf8ef1-8f1a-4688-8443-600830a81400-kube-api-access-kmtnb\") pod \"multus-admission-controller-857f4d67dd-vsrcj\" (UID: \"e3cf8ef1-8f1a-4688-8443-600830a81400\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-vsrcj" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.026684 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/82bc8cef-2bec-4d01-a1a1-fd9c19c830a2-metrics-tls\") pod \"ingress-operator-5b745b69d9-7kt64\" (UID: \"82bc8cef-2bec-4d01-a1a1-fd9c19c830a2\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7kt64" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.026710 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25565\" (UniqueName: \"kubernetes.io/projected/477ea589-1909-4668-ab2a-585589a4c8b6-kube-api-access-25565\") pod \"ingress-canary-hn6rt\" (UID: \"477ea589-1909-4668-ab2a-585589a4c8b6\") " pod="openshift-ingress-canary/ingress-canary-hn6rt" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.026717 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9fc43392-6eaa-4e4c-bffb-42bdbc556d4e-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-mdgf6\" (UID: \"9fc43392-6eaa-4e4c-bffb-42bdbc556d4e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mdgf6" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.026742 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/c62c3a20-356b-487a-80b8-aaea5650712b-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-zkwx7\" (UID: \"c62c3a20-356b-487a-80b8-aaea5650712b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zkwx7" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.026859 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8ae57544-4ad2-46e2-b205-3a770abfc47f-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-mxn5w\" (UID: \"8ae57544-4ad2-46e2-b205-3a770abfc47f\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mxn5w" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.026888 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c62c3a20-356b-487a-80b8-aaea5650712b-audit-policies\") pod \"apiserver-7bbb656c7d-zkwx7\" (UID: \"c62c3a20-356b-487a-80b8-aaea5650712b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zkwx7" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.026924 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-59d2n\" (UniqueName: \"kubernetes.io/projected/0af9b5a1-9cca-41dc-9a23-823a440db8c1-kube-api-access-59d2n\") pod \"control-plane-machine-set-operator-78cbb6b69f-kvvch\" (UID: \"0af9b5a1-9cca-41dc-9a23-823a440db8c1\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kvvch" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.026954 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d6c003d6-66db-4a16-86a2-ba0d03cf1a25-audit-dir\") pod \"apiserver-76f77b778f-6lhmn\" (UID: 
\"d6c003d6-66db-4a16-86a2-ba0d03cf1a25\") " pod="openshift-apiserver/apiserver-76f77b778f-6lhmn" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.026970 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d6c003d6-66db-4a16-86a2-ba0d03cf1a25-trusted-ca-bundle\") pod \"apiserver-76f77b778f-6lhmn\" (UID: \"d6c003d6-66db-4a16-86a2-ba0d03cf1a25\") " pod="openshift-apiserver/apiserver-76f77b778f-6lhmn" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.026982 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/a5eb1e8d-ffa8-4422-aa5d-852074436139-stats-auth\") pod \"router-default-5444994796-gx9t9\" (UID: \"a5eb1e8d-ffa8-4422-aa5d-852074436139\") " pod="openshift-ingress/router-default-5444994796-gx9t9" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.027130 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/f8fac562-f66c-433b-9e4a-1a08fe6d78f5-console-config\") pod \"console-f9d7485db-dsngx\" (UID: \"f8fac562-f66c-433b-9e4a-1a08fe6d78f5\") " pod="openshift-console/console-f9d7485db-dsngx" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.032172 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d63ddf25-4c00-4aec-b51a-5c80fe2a5810-config\") pod \"machine-approver-56656f9798-mq42v\" (UID: \"d63ddf25-4c00-4aec-b51a-5c80fe2a5810\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mq42v" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.023747 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8731191c-a1c3-4422-bcac-e6e6cfec649f-client-ca\") pod \"route-controller-manager-6576b87f9c-6xswb\" (UID: \"8731191c-a1c3-4422-bcac-e6e6cfec649f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6xswb" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.028169 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/d63ddf25-4c00-4aec-b51a-5c80fe2a5810-machine-approver-tls\") pod \"machine-approver-56656f9798-mq42v\" (UID: \"d63ddf25-4c00-4aec-b51a-5c80fe2a5810\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mq42v" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.028416 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c62c3a20-356b-487a-80b8-aaea5650712b-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-zkwx7\" (UID: \"c62c3a20-356b-487a-80b8-aaea5650712b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zkwx7" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.028967 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/dcb838a3-bd74-4860-a1e7-c9bacf4334ca-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-mjd22\" (UID: \"dcb838a3-bd74-4860-a1e7-c9bacf4334ca\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mjd22" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.029587 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: 
\"kubernetes.io/secret/c62c3a20-356b-487a-80b8-aaea5650712b-etcd-client\") pod \"apiserver-7bbb656c7d-zkwx7\" (UID: \"c62c3a20-356b-487a-80b8-aaea5650712b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zkwx7" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.024497 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/d6c003d6-66db-4a16-86a2-ba0d03cf1a25-etcd-serving-ca\") pod \"apiserver-76f77b778f-6lhmn\" (UID: \"d6c003d6-66db-4a16-86a2-ba0d03cf1a25\") " pod="openshift-apiserver/apiserver-76f77b778f-6lhmn" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.030730 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/5418254d-28ed-4c8e-b0d5-70ac0fe883ee-proxy-tls\") pod \"machine-config-operator-74547568cd-shl5n\" (UID: \"5418254d-28ed-4c8e-b0d5-70ac0fe883ee\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-shl5n" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.030828 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/5418254d-28ed-4c8e-b0d5-70ac0fe883ee-auth-proxy-config\") pod \"machine-config-operator-74547568cd-shl5n\" (UID: \"5418254d-28ed-4c8e-b0d5-70ac0fe883ee\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-shl5n" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.030840 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d63ddf25-4c00-4aec-b51a-5c80fe2a5810-auth-proxy-config\") pod \"machine-approver-56656f9798-mq42v\" (UID: \"d63ddf25-4c00-4aec-b51a-5c80fe2a5810\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mq42v" Jan 26 16:57:02 crc kubenswrapper[4865]: E0126 16:57:02.030864 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:02.530840831 +0000 UTC m=+150.114726498 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.032420 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/1a1bd8ec-d313-45c0-9859-880fe45c4342-node-bootstrap-token\") pod \"machine-config-server-c26dz\" (UID: \"1a1bd8ec-d313-45c0-9859-880fe45c4342\") " pod="openshift-machine-config-operator/machine-config-server-c26dz" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.032460 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-smgv8\" (UniqueName: \"kubernetes.io/projected/1a1bd8ec-d313-45c0-9859-880fe45c4342-kube-api-access-smgv8\") pod \"machine-config-server-c26dz\" (UID: \"1a1bd8ec-d313-45c0-9859-880fe45c4342\") " pod="openshift-machine-config-operator/machine-config-server-c26dz" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.032497 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8g5v2\" (UniqueName: \"kubernetes.io/projected/9fc43392-6eaa-4e4c-bffb-42bdbc556d4e-kube-api-access-8g5v2\") pod \"openshift-controller-manager-operator-756b6f6bc6-mdgf6\" (UID: \"9fc43392-6eaa-4e4c-bffb-42bdbc556d4e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mdgf6" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.032529 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a5eb1e8d-ffa8-4422-aa5d-852074436139-metrics-certs\") pod \"router-default-5444994796-gx9t9\" (UID: \"a5eb1e8d-ffa8-4422-aa5d-852074436139\") " pod="openshift-ingress/router-default-5444994796-gx9t9" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.032553 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-audit-policies\") pod \"oauth-openshift-558db77b4-npnhh\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.032576 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6pqjw\" (UniqueName: \"kubernetes.io/projected/c45cd232-7b33-4314-8060-842c5b6ccb6c-kube-api-access-6pqjw\") pod \"olm-operator-6b444d44fb-g66fj\" (UID: \"c45cd232-7b33-4314-8060-842c5b6ccb6c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-g66fj" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.032599 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-npnhh\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.032627 4865 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7vdfv\" (UniqueName: \"kubernetes.io/projected/aafa6785-2823-4a2c-919d-b0276056337c-kube-api-access-7vdfv\") pod \"service-ca-9c57cc56f-gzx5p\" (UID: \"aafa6785-2823-4a2c-919d-b0276056337c\") " pod="openshift-service-ca/service-ca-9c57cc56f-gzx5p" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.032651 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/d6c003d6-66db-4a16-86a2-ba0d03cf1a25-audit\") pod \"apiserver-76f77b778f-6lhmn\" (UID: \"d6c003d6-66db-4a16-86a2-ba0d03cf1a25\") " pod="openshift-apiserver/apiserver-76f77b778f-6lhmn" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.032678 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/092200be-8013-4057-b586-d321c27dc3fb-webhook-cert\") pod \"packageserver-d55dfcdfc-l6hpd\" (UID: \"092200be-8013-4057-b586-d321c27dc3fb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-l6hpd" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.032696 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/55e71027-c397-4e49-adef-6f8d1c7d78d2-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-z8s8l\" (UID: \"55e71027-c397-4e49-adef-6f8d1c7d78d2\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-z8s8l" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.032725 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6bhmr\" (UniqueName: \"kubernetes.io/projected/40010721-d20f-43ea-966f-5a4a41d60a20-kube-api-access-6bhmr\") pod \"dns-default-gtdlb\" (UID: \"40010721-d20f-43ea-966f-5a4a41d60a20\") " pod="openshift-dns/dns-default-gtdlb" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.033061 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/dcb838a3-bd74-4860-a1e7-c9bacf4334ca-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-mjd22\" (UID: \"dcb838a3-bd74-4860-a1e7-c9bacf4334ca\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mjd22" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.033300 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6c003d6-66db-4a16-86a2-ba0d03cf1a25-config\") pod \"apiserver-76f77b778f-6lhmn\" (UID: \"d6c003d6-66db-4a16-86a2-ba0d03cf1a25\") " pod="openshift-apiserver/apiserver-76f77b778f-6lhmn" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.031442 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d3d19ca3-d0e7-4ef8-9112-c66a479404d2-metrics-tls\") pod \"dns-operator-744455d44c-mfq2t\" (UID: \"d3d19ca3-d0e7-4ef8-9112-c66a479404d2\") " pod="openshift-dns-operator/dns-operator-744455d44c-mfq2t" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.030937 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/f8fac562-f66c-433b-9e4a-1a08fe6d78f5-console-oauth-config\") pod \"console-f9d7485db-dsngx\" (UID: \"f8fac562-f66c-433b-9e4a-1a08fe6d78f5\") " 
pod="openshift-console/console-f9d7485db-dsngx" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.031191 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d6c003d6-66db-4a16-86a2-ba0d03cf1a25-audit-dir\") pod \"apiserver-76f77b778f-6lhmn\" (UID: \"d6c003d6-66db-4a16-86a2-ba0d03cf1a25\") " pod="openshift-apiserver/apiserver-76f77b778f-6lhmn" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.033403 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9fc43392-6eaa-4e4c-bffb-42bdbc556d4e-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-mdgf6\" (UID: \"9fc43392-6eaa-4e4c-bffb-42bdbc556d4e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mdgf6" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.027415 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/c62c3a20-356b-487a-80b8-aaea5650712b-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-zkwx7\" (UID: \"c62c3a20-356b-487a-80b8-aaea5650712b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zkwx7" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.031375 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c62c3a20-356b-487a-80b8-aaea5650712b-audit-policies\") pod \"apiserver-7bbb656c7d-zkwx7\" (UID: \"c62c3a20-356b-487a-80b8-aaea5650712b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zkwx7" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.031403 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/d6c003d6-66db-4a16-86a2-ba0d03cf1a25-etcd-client\") pod \"apiserver-76f77b778f-6lhmn\" (UID: \"d6c003d6-66db-4a16-86a2-ba0d03cf1a25\") " pod="openshift-apiserver/apiserver-76f77b778f-6lhmn" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.031417 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d6c003d6-66db-4a16-86a2-ba0d03cf1a25-serving-cert\") pod \"apiserver-76f77b778f-6lhmn\" (UID: \"d6c003d6-66db-4a16-86a2-ba0d03cf1a25\") " pod="openshift-apiserver/apiserver-76f77b778f-6lhmn" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.033651 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f8fac562-f66c-433b-9e4a-1a08fe6d78f5-service-ca\") pod \"console-f9d7485db-dsngx\" (UID: \"f8fac562-f66c-433b-9e4a-1a08fe6d78f5\") " pod="openshift-console/console-f9d7485db-dsngx" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.033828 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c62c3a20-356b-487a-80b8-aaea5650712b-serving-cert\") pod \"apiserver-7bbb656c7d-zkwx7\" (UID: \"c62c3a20-356b-487a-80b8-aaea5650712b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zkwx7" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.031764 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-audit-dir\") pod \"oauth-openshift-558db77b4-npnhh\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.024238 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/f8fac562-f66c-433b-9e4a-1a08fe6d78f5-console-serving-cert\") pod \"console-f9d7485db-dsngx\" (UID: \"f8fac562-f66c-433b-9e4a-1a08fe6d78f5\") " pod="openshift-console/console-f9d7485db-dsngx" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.034184 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-audit-policies\") pod \"oauth-openshift-558db77b4-npnhh\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.034851 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/f8fac562-f66c-433b-9e4a-1a08fe6d78f5-console-config\") pod \"console-f9d7485db-dsngx\" (UID: \"f8fac562-f66c-433b-9e4a-1a08fe6d78f5\") " pod="openshift-console/console-f9d7485db-dsngx" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.035698 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f8fac562-f66c-433b-9e4a-1a08fe6d78f5-trusted-ca-bundle\") pod \"console-f9d7485db-dsngx\" (UID: \"f8fac562-f66c-433b-9e4a-1a08fe6d78f5\") " pod="openshift-console/console-f9d7485db-dsngx" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.035794 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/82bc8cef-2bec-4d01-a1a1-fd9c19c830a2-metrics-tls\") pod \"ingress-operator-5b745b69d9-7kt64\" (UID: \"82bc8cef-2bec-4d01-a1a1-fd9c19c830a2\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7kt64" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.035837 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-npnhh\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.032629 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/f8fac562-f66c-433b-9e4a-1a08fe6d78f5-oauth-serving-cert\") pod \"console-f9d7485db-dsngx\" (UID: \"f8fac562-f66c-433b-9e4a-1a08fe6d78f5\") " pod="openshift-console/console-f9d7485db-dsngx" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.035936 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c62c3a20-356b-487a-80b8-aaea5650712b-audit-dir\") pod \"apiserver-7bbb656c7d-zkwx7\" (UID: \"c62c3a20-356b-487a-80b8-aaea5650712b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zkwx7" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.031681 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/d6c003d6-66db-4a16-86a2-ba0d03cf1a25-node-pullsecrets\") pod \"apiserver-76f77b778f-6lhmn\" (UID: \"d6c003d6-66db-4a16-86a2-ba0d03cf1a25\") " 
pod="openshift-apiserver/apiserver-76f77b778f-6lhmn" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.036321 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/092200be-8013-4057-b586-d321c27dc3fb-tmpfs\") pod \"packageserver-d55dfcdfc-l6hpd\" (UID: \"092200be-8013-4057-b586-d321c27dc3fb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-l6hpd" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.036375 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/d6c003d6-66db-4a16-86a2-ba0d03cf1a25-audit\") pod \"apiserver-76f77b778f-6lhmn\" (UID: \"d6c003d6-66db-4a16-86a2-ba0d03cf1a25\") " pod="openshift-apiserver/apiserver-76f77b778f-6lhmn" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.036644 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/55e71027-c397-4e49-adef-6f8d1c7d78d2-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-z8s8l\" (UID: \"55e71027-c397-4e49-adef-6f8d1c7d78d2\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-z8s8l" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.037254 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/d6c003d6-66db-4a16-86a2-ba0d03cf1a25-encryption-config\") pod \"apiserver-76f77b778f-6lhmn\" (UID: \"d6c003d6-66db-4a16-86a2-ba0d03cf1a25\") " pod="openshift-apiserver/apiserver-76f77b778f-6lhmn" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.037321 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-npnhh\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.037661 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-npnhh\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.037934 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8731191c-a1c3-4422-bcac-e6e6cfec649f-serving-cert\") pod \"route-controller-manager-6576b87f9c-6xswb\" (UID: \"8731191c-a1c3-4422-bcac-e6e6cfec649f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6xswb" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.038574 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/723c5d82-43eb-4833-b1d3-ad5236350fcc-serving-cert\") pod \"openshift-config-operator-7777fb866f-tndns\" (UID: \"723c5d82-43eb-4833-b1d3-ad5236350fcc\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-tndns" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.038884 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" 
(UniqueName: \"kubernetes.io/secret/9c1dc714-6c88-4db6-ac20-54b9771956a3-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-4b7rk\" (UID: \"9c1dc714-6c88-4db6-ac20-54b9771956a3\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4b7rk" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.040004 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.042450 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/5418254d-28ed-4c8e-b0d5-70ac0fe883ee-images\") pod \"machine-config-operator-74547568cd-shl5n\" (UID: \"5418254d-28ed-4c8e-b0d5-70ac0fe883ee\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-shl5n" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.044377 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-npnhh\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.060478 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.099812 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.102879 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a5eb1e8d-ffa8-4422-aa5d-852074436139-service-ca-bundle\") pod \"router-default-5444994796-gx9t9\" (UID: \"a5eb1e8d-ffa8-4422-aa5d-852074436139\") " pod="openshift-ingress/router-default-5444994796-gx9t9" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.119485 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.133193 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:57:02 crc kubenswrapper[4865]: E0126 16:57:02.133288 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:02.63326067 +0000 UTC m=+150.217146257 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.133424 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/e8e3ebba-3ac5-4cee-8fa8-273a1f25eaf2-plugins-dir\") pod \"csi-hostpathplugin-wx8d6\" (UID: \"e8e3ebba-3ac5-4cee-8fa8-273a1f25eaf2\") " pod="hostpath-provisioner/csi-hostpathplugin-wx8d6" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.133471 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/477ea589-1909-4668-ab2a-585589a4c8b6-cert\") pod \"ingress-canary-hn6rt\" (UID: \"477ea589-1909-4668-ab2a-585589a4c8b6\") " pod="openshift-ingress-canary/ingress-canary-hn6rt" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.133516 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/40010721-d20f-43ea-966f-5a4a41d60a20-metrics-tls\") pod \"dns-default-gtdlb\" (UID: \"40010721-d20f-43ea-966f-5a4a41d60a20\") " pod="openshift-dns/dns-default-gtdlb" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.133534 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9xd9\" (UniqueName: \"kubernetes.io/projected/e8e3ebba-3ac5-4cee-8fa8-273a1f25eaf2-kube-api-access-l9xd9\") pod \"csi-hostpathplugin-wx8d6\" (UID: \"e8e3ebba-3ac5-4cee-8fa8-273a1f25eaf2\") " pod="hostpath-provisioner/csi-hostpathplugin-wx8d6" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.133619 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/e8e3ebba-3ac5-4cee-8fa8-273a1f25eaf2-mountpoint-dir\") pod \"csi-hostpathplugin-wx8d6\" (UID: \"e8e3ebba-3ac5-4cee-8fa8-273a1f25eaf2\") " pod="hostpath-provisioner/csi-hostpathplugin-wx8d6" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.133644 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25565\" (UniqueName: \"kubernetes.io/projected/477ea589-1909-4668-ab2a-585589a4c8b6-kube-api-access-25565\") pod \"ingress-canary-hn6rt\" (UID: \"477ea589-1909-4668-ab2a-585589a4c8b6\") " pod="openshift-ingress-canary/ingress-canary-hn6rt" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.133736 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6bhmr\" (UniqueName: \"kubernetes.io/projected/40010721-d20f-43ea-966f-5a4a41d60a20-kube-api-access-6bhmr\") pod \"dns-default-gtdlb\" (UID: \"40010721-d20f-43ea-966f-5a4a41d60a20\") " pod="openshift-dns/dns-default-gtdlb" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.133896 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/40010721-d20f-43ea-966f-5a4a41d60a20-config-volume\") pod \"dns-default-gtdlb\" (UID: \"40010721-d20f-43ea-966f-5a4a41d60a20\") " pod="openshift-dns/dns-default-gtdlb" Jan 26 16:57:02 crc 
kubenswrapper[4865]: I0126 16:57:02.134035 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/e8e3ebba-3ac5-4cee-8fa8-273a1f25eaf2-registration-dir\") pod \"csi-hostpathplugin-wx8d6\" (UID: \"e8e3ebba-3ac5-4cee-8fa8-273a1f25eaf2\") " pod="hostpath-provisioner/csi-hostpathplugin-wx8d6" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.134064 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.134082 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/e8e3ebba-3ac5-4cee-8fa8-273a1f25eaf2-socket-dir\") pod \"csi-hostpathplugin-wx8d6\" (UID: \"e8e3ebba-3ac5-4cee-8fa8-273a1f25eaf2\") " pod="hostpath-provisioner/csi-hostpathplugin-wx8d6" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.134097 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/e8e3ebba-3ac5-4cee-8fa8-273a1f25eaf2-csi-data-dir\") pod \"csi-hostpathplugin-wx8d6\" (UID: \"e8e3ebba-3ac5-4cee-8fa8-273a1f25eaf2\") " pod="hostpath-provisioner/csi-hostpathplugin-wx8d6" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.134316 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/e8e3ebba-3ac5-4cee-8fa8-273a1f25eaf2-csi-data-dir\") pod \"csi-hostpathplugin-wx8d6\" (UID: \"e8e3ebba-3ac5-4cee-8fa8-273a1f25eaf2\") " pod="hostpath-provisioner/csi-hostpathplugin-wx8d6" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.134612 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/e8e3ebba-3ac5-4cee-8fa8-273a1f25eaf2-mountpoint-dir\") pod \"csi-hostpathplugin-wx8d6\" (UID: \"e8e3ebba-3ac5-4cee-8fa8-273a1f25eaf2\") " pod="hostpath-provisioner/csi-hostpathplugin-wx8d6" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.134677 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/e8e3ebba-3ac5-4cee-8fa8-273a1f25eaf2-registration-dir\") pod \"csi-hostpathplugin-wx8d6\" (UID: \"e8e3ebba-3ac5-4cee-8fa8-273a1f25eaf2\") " pod="hostpath-provisioner/csi-hostpathplugin-wx8d6" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.134761 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/e8e3ebba-3ac5-4cee-8fa8-273a1f25eaf2-plugins-dir\") pod \"csi-hostpathplugin-wx8d6\" (UID: \"e8e3ebba-3ac5-4cee-8fa8-273a1f25eaf2\") " pod="hostpath-provisioner/csi-hostpathplugin-wx8d6" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.134824 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/e8e3ebba-3ac5-4cee-8fa8-273a1f25eaf2-socket-dir\") pod \"csi-hostpathplugin-wx8d6\" (UID: \"e8e3ebba-3ac5-4cee-8fa8-273a1f25eaf2\") " pod="hostpath-provisioner/csi-hostpathplugin-wx8d6" Jan 26 16:57:02 crc kubenswrapper[4865]: E0126 
16:57:02.135276 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:02.635243017 +0000 UTC m=+150.219128604 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.140168 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.154044 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/a5eb1e8d-ffa8-4422-aa5d-852074436139-default-certificate\") pod \"router-default-5444994796-gx9t9\" (UID: \"a5eb1e8d-ffa8-4422-aa5d-852074436139\") " pod="openshift-ingress/router-default-5444994796-gx9t9" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.161378 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.171325 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/a5eb1e8d-ffa8-4422-aa5d-852074436139-stats-auth\") pod \"router-default-5444994796-gx9t9\" (UID: \"a5eb1e8d-ffa8-4422-aa5d-852074436139\") " pod="openshift-ingress/router-default-5444994796-gx9t9" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.180556 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.187704 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a5eb1e8d-ffa8-4422-aa5d-852074436139-metrics-certs\") pod \"router-default-5444994796-gx9t9\" (UID: \"a5eb1e8d-ffa8-4422-aa5d-852074436139\") " pod="openshift-ingress/router-default-5444994796-gx9t9" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.199557 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.220780 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.224940 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/e3cf8ef1-8f1a-4688-8443-600830a81400-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-vsrcj\" (UID: \"e3cf8ef1-8f1a-4688-8443-600830a81400\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-vsrcj" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.234845 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:57:02 crc kubenswrapper[4865]: E0126 16:57:02.235028 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:02.734984928 +0000 UTC m=+150.318870515 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.235193 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:02 crc kubenswrapper[4865]: E0126 16:57:02.235557 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:02.735550015 +0000 UTC m=+150.319435602 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.240660 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.250510 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/08899e06-b9e4-4fda-b49b-879f8789ea40-etcd-ca\") pod \"etcd-operator-b45778765-68xvn\" (UID: \"08899e06-b9e4-4fda-b49b-879f8789ea40\") " pod="openshift-etcd-operator/etcd-operator-b45778765-68xvn" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.260482 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.280313 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.301212 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.314983 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/08899e06-b9e4-4fda-b49b-879f8789ea40-serving-cert\") pod \"etcd-operator-b45778765-68xvn\" (UID: \"08899e06-b9e4-4fda-b49b-879f8789ea40\") " pod="openshift-etcd-operator/etcd-operator-b45778765-68xvn" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.319008 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.327292 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/08899e06-b9e4-4fda-b49b-879f8789ea40-etcd-client\") pod \"etcd-operator-b45778765-68xvn\" (UID: \"08899e06-b9e4-4fda-b49b-879f8789ea40\") " pod="openshift-etcd-operator/etcd-operator-b45778765-68xvn" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.337308 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:57:02 crc kubenswrapper[4865]: E0126 16:57:02.337594 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:02.837559801 +0000 UTC m=+150.421445408 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.338241 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:02 crc kubenswrapper[4865]: E0126 16:57:02.338603 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:02.838595161 +0000 UTC m=+150.422480748 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.340466 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.361103 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.381866 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.399654 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.403254 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/08899e06-b9e4-4fda-b49b-879f8789ea40-etcd-service-ca\") pod \"etcd-operator-b45778765-68xvn\" (UID: \"08899e06-b9e4-4fda-b49b-879f8789ea40\") " pod="openshift-etcd-operator/etcd-operator-b45778765-68xvn" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.420635 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.425153 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8ae57544-4ad2-46e2-b205-3a770abfc47f-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-mxn5w\" (UID: \"8ae57544-4ad2-46e2-b205-3a770abfc47f\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mxn5w" Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.439043 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:57:02 crc kubenswrapper[4865]: E0126 16:57:02.439199 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:02.939162456 +0000 UTC m=+150.523048043 (durationBeforeRetry 500ms). 
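The nestedpendingoperations.go:348 errors repeating here are the volume manager's backoff gate rather than fresh failures: operations are serialized per volume, and after a failure the next attempt is refused until the printed deadline passes (the durationBeforeRetry, 500ms in each message above; on persistent failure kubelet grows this delay exponentially toward a cap of roughly two minutes). A self-contained sketch of that pattern under assumed names, not kubelet's actual code:

    package main

    import (
        "errors"
        "fmt"
        "time"
    )

    // retryWithBackoff mirrors the "No retries permitted until ...
    // (durationBeforeRetry 500ms)" behavior: each consecutive failure
    // doubles the wait before the next attempt, up to a cap.
    func retryWithBackoff(op func() error) error {
        const (
            initial  = 500 * time.Millisecond // initial durationBeforeRetry
            maxDelay = 2 * time.Minute        // approximate upper bound
        )
        delay := initial
        for attempt := 1; attempt <= 10; attempt++ {
            if err := op(); err == nil {
                return nil
            }
            fmt.Printf("attempt %d failed; no retries permitted for %v\n", attempt, delay)
            time.Sleep(delay)
            if delay *= 2; delay > maxDelay {
                delay = maxDelay
            }
        }
        return errors.New("giving up")
    }

    func main() {
        attempts := 0
        _ = retryWithBackoff(func() error {
            attempts++
            if attempts < 4 {
                return errors.New("driver not yet registered") // simulated failure
            }
            return nil
        })
    }

In the log the delay stays at 500ms because each attempt here is a new operation keyed on the same volume, and the driver registers before the backoff has a chance to grow.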
Jan 26 16:57:02 crc kubenswrapper[4865]: E0126 16:57:02.439199 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:02.939162456 +0000 UTC m=+150.523048043 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.439511 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:02 crc kubenswrapper[4865]: E0126 16:57:02.439968 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:02.939957709 +0000 UTC m=+150.523843296 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.440578 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config"
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.443344 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ae57544-4ad2-46e2-b205-3a770abfc47f-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-mxn5w\" (UID: \"8ae57544-4ad2-46e2-b205-3a770abfc47f\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mxn5w"
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.460303 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt"
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.480639 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw"
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.501614 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert"
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.506533 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b7cf8ff2-589a-4f1a-aa4a-0afe351bdd3b-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-vzw8s\" (UID: \"b7cf8ff2-589a-4f1a-aa4a-0afe351bdd3b\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vzw8s"
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.520670 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt"
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.540488 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:02 crc kubenswrapper[4865]: E0126 16:57:02.540742 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:03.04070545 +0000 UTC m=+150.624591037 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.540888 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config"
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.540929 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:02 crc kubenswrapper[4865]: E0126 16:57:02.541665 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:03.041630296 +0000 UTC m=+150.625516063 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.549235 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b7cf8ff2-589a-4f1a-aa4a-0afe351bdd3b-config\") pod \"kube-controller-manager-operator-78b949d7b-vzw8s\" (UID: \"b7cf8ff2-589a-4f1a-aa4a-0afe351bdd3b\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vzw8s"
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.560416 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config"
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.562725 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/08899e06-b9e4-4fda-b49b-879f8789ea40-config\") pod \"etcd-operator-b45778765-68xvn\" (UID: \"08899e06-b9e4-4fda-b49b-879f8789ea40\") " pod="openshift-etcd-operator/etcd-operator-b45778765-68xvn"
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.579819 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx"
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.601220 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls"
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.608060 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/55e71027-c397-4e49-adef-6f8d1c7d78d2-proxy-tls\") pod \"machine-config-controller-84d6567774-z8s8l\" (UID: \"55e71027-c397-4e49-adef-6f8d1c7d78d2\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-z8s8l"
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.620703 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt"
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.639845 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert"
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.642434 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:02 crc kubenswrapper[4865]: E0126 16:57:02.642622 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:03.142597053 +0000 UTC m=+150.726482640 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.643406 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:02 crc kubenswrapper[4865]: E0126 16:57:02.643851 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:03.143839879 +0000 UTC m=+150.727725536 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.648475 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/092200be-8013-4057-b586-d321c27dc3fb-webhook-cert\") pod \"packageserver-d55dfcdfc-l6hpd\" (UID: \"092200be-8013-4057-b586-d321c27dc3fb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-l6hpd"
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.648475 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/092200be-8013-4057-b586-d321c27dc3fb-apiservice-cert\") pod \"packageserver-d55dfcdfc-l6hpd\" (UID: \"092200be-8013-4057-b586-d321c27dc3fb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-l6hpd"
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.660229 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk"
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.680687 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt"
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.699492 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt"
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.720121 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt"
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.739048 4865 request.go:700] Waited for 1.00799993s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-storage-version-migrator/secrets?fieldSelector=metadata.name%3Dkube-storage-version-migrator-sa-dockercfg-5xfcg&limit=500&resourceVersion=0
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.740802 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg"
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.744608 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:02 crc kubenswrapper[4865]: E0126 16:57:02.744863 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:03.244831626 +0000 UTC m=+150.828717213 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.745307 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
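The request.go:700 message above ("Waited for 1.00799993s due to client-side throttling, not priority and fairness") is emitted by client-go itself, not by the apiserver: at startup the kubelet fires a burst of LIST requests, one per secret or configmap reflector it is populating, and anything beyond the client's QPS/burst budget is delayed locally before it is ever sent. A short sketch of where that budget lives for any client-go consumer, with a hypothetical kubeconfig path; the long-standing client-go defaults are QPS 5 and Burst 10:

    package main

    import (
        "fmt"

        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/tools/clientcmd"
    )

    func main() {
        cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig") // hypothetical path
        if err != nil {
            panic(err)
        }
        // Raising QPS/Burst widens the client-side token bucket that produced
        // the "Waited for ... due to client-side throttling" log line above.
        cfg.QPS = 50
        cfg.Burst = 100
        client, err := kubernetes.NewForConfig(cfg)
        if err != nil {
            panic(err)
        }
        fmt.Println("client configured:", client != nil)
    }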
Jan 26 16:57:02 crc kubenswrapper[4865]: E0126 16:57:02.745965 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:03.245947878 +0000 UTC m=+150.829833465 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.760819 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert"
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.769333 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/2918b19b-b96c-4b24-988d-f486d9cee307-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-j2f55\" (UID: \"2918b19b-b96c-4b24-988d-f486d9cee307\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j2f55"
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.780213 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt"
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.801101 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr"
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.821311 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert"
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.825932 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7cd346f7-8914-4089-8e15-89085f8340d2-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-qd267\" (UID: \"7cd346f7-8914-4089-8e15-89085f8340d2\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qd267"
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.840631 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config"
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.842395 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7cd346f7-8914-4089-8e15-89085f8340d2-config\") pod \"kube-apiserver-operator-766d6c64bb-qd267\" (UID: \"7cd346f7-8914-4089-8e15-89085f8340d2\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qd267"
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.846610 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:02 crc kubenswrapper[4865]: E0126 16:57:02.847240 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:03.347223684 +0000 UTC m=+150.931109271 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.860656 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert"
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.872685 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3862707b-bcae-4ad0-b309-fa075dc82f70-secret-volume\") pod \"collect-profiles-29490765-lx6b6\" (UID: \"3862707b-bcae-4ad0-b309-fa075dc82f70\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490765-lx6b6"
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.873630 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/c45cd232-7b33-4314-8060-842c5b6ccb6c-profile-collector-cert\") pod \"olm-operator-6b444d44fb-g66fj\" (UID: \"c45cd232-7b33-4314-8060-842c5b6ccb6c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-g66fj"
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.874226 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/44932794-d83d-4520-ab4c-21e5a7652506-profile-collector-cert\") pod \"catalog-operator-68c6474976-kw5g8\" (UID: \"44932794-d83d-4520-ab4c-21e5a7652506\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kw5g8"
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.880017 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert"
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.888606 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/c45cd232-7b33-4314-8060-842c5b6ccb6c-srv-cert\") pod \"olm-operator-6b444d44fb-g66fj\" (UID: \"c45cd232-7b33-4314-8060-842c5b6ccb6c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-g66fj"
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.900520 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt"
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.921634 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls"
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.925287 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/0af9b5a1-9cca-41dc-9a23-823a440db8c1-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-kvvch\" (UID: \"0af9b5a1-9cca-41dc-9a23-823a440db8c1\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kvvch"
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.940455 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt"
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.948280 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:02 crc kubenswrapper[4865]: E0126 16:57:02.948940 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:03.448923022 +0000 UTC m=+151.032808609 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.959873 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.965159 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3862707b-bcae-4ad0-b309-fa075dc82f70-config-volume\") pod \"collect-profiles-29490765-lx6b6\" (UID: \"3862707b-bcae-4ad0-b309-fa075dc82f70\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490765-lx6b6"
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.979775 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Jan 26 16:57:02 crc kubenswrapper[4865]: I0126 16:57:02.999819 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt"
Jan 26 16:57:03 crc kubenswrapper[4865]: E0126 16:57:03.018644 4865 configmap.go:193] Couldn't get configMap openshift-service-ca-operator/service-ca-operator-config: failed to sync configmap cache: timed out waiting for the condition
Jan 26 16:57:03 crc kubenswrapper[4865]: E0126 16:57:03.019158 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/96e731f1-c61c-4bb3-9a87-d0d2a85ec1e4-config podName:96e731f1-c61c-4bb3-9a87-d0d2a85ec1e4 nodeName:}" failed. No retries permitted until 2026-01-26 16:57:03.519126576 +0000 UTC m=+151.103012163 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/96e731f1-c61c-4bb3-9a87-d0d2a85ec1e4-config") pod "service-ca-operator-777779d784-zbkcg" (UID: "96e731f1-c61c-4bb3-9a87-d0d2a85ec1e4") : failed to sync configmap cache: timed out waiting for the condition
Jan 26 16:57:03 crc kubenswrapper[4865]: E0126 16:57:03.018723 4865 secret.go:188] Couldn't get secret openshift-service-ca/signing-key: failed to sync secret cache: timed out waiting for the condition
Jan 26 16:57:03 crc kubenswrapper[4865]: E0126 16:57:03.019406 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/aafa6785-2823-4a2c-919d-b0276056337c-signing-key podName:aafa6785-2823-4a2c-919d-b0276056337c nodeName:}" failed. No retries permitted until 2026-01-26 16:57:03.519391804 +0000 UTC m=+151.103277391 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "signing-key" (UniqueName: "kubernetes.io/secret/aafa6785-2823-4a2c-919d-b0276056337c-signing-key") pod "service-ca-9c57cc56f-gzx5p" (UID: "aafa6785-2823-4a2c-919d-b0276056337c") : failed to sync secret cache: timed out waiting for the condition
Jan 26 16:57:03 crc kubenswrapper[4865]: E0126 16:57:03.019405 4865 configmap.go:193] Couldn't get configMap openshift-marketplace/marketplace-trusted-ca: failed to sync configmap cache: timed out waiting for the condition
Jan 26 16:57:03 crc kubenswrapper[4865]: E0126 16:57:03.019637 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/ce5bf85d-dd65-4174-8fdc-10988986d665-marketplace-trusted-ca podName:ce5bf85d-dd65-4174-8fdc-10988986d665 nodeName:}" failed. No retries permitted until 2026-01-26 16:57:03.51962774 +0000 UTC m=+151.103513327 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "marketplace-trusted-ca" (UniqueName: "kubernetes.io/configmap/ce5bf85d-dd65-4174-8fdc-10988986d665-marketplace-trusted-ca") pod "marketplace-operator-79b997595-phb7b" (UID: "ce5bf85d-dd65-4174-8fdc-10988986d665") : failed to sync configmap cache: timed out waiting for the condition
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.021172 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl"
Jan 26 16:57:03 crc kubenswrapper[4865]: E0126 16:57:03.030152 4865 secret.go:188] Couldn't get secret openshift-marketplace/marketplace-operator-metrics: failed to sync secret cache: timed out waiting for the condition
Jan 26 16:57:03 crc kubenswrapper[4865]: E0126 16:57:03.030387 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ce5bf85d-dd65-4174-8fdc-10988986d665-marketplace-operator-metrics podName:ce5bf85d-dd65-4174-8fdc-10988986d665 nodeName:}" failed. No retries permitted until 2026-01-26 16:57:03.530334998 +0000 UTC m=+151.114220625 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "marketplace-operator-metrics" (UniqueName: "kubernetes.io/secret/ce5bf85d-dd65-4174-8fdc-10988986d665-marketplace-operator-metrics") pod "marketplace-operator-79b997595-phb7b" (UID: "ce5bf85d-dd65-4174-8fdc-10988986d665") : failed to sync secret cache: timed out waiting for the condition
Jan 26 16:57:03 crc kubenswrapper[4865]: E0126 16:57:03.032660 4865 secret.go:188] Couldn't get secret openshift-machine-config-operator/machine-config-server-tls: failed to sync secret cache: timed out waiting for the condition
Jan 26 16:57:03 crc kubenswrapper[4865]: E0126 16:57:03.032724 4865 configmap.go:193] Couldn't get configMap openshift-kube-storage-version-migrator-operator/config: failed to sync configmap cache: timed out waiting for the condition
Jan 26 16:57:03 crc kubenswrapper[4865]: E0126 16:57:03.032724 4865 configmap.go:193] Couldn't get configMap openshift-service-ca/signing-cabundle: failed to sync configmap cache: timed out waiting for the condition
Jan 26 16:57:03 crc kubenswrapper[4865]: E0126 16:57:03.032802 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/aafa6785-2823-4a2c-919d-b0276056337c-signing-cabundle podName:aafa6785-2823-4a2c-919d-b0276056337c nodeName:}" failed. No retries permitted until 2026-01-26 16:57:03.532786838 +0000 UTC m=+151.116672425 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "signing-cabundle" (UniqueName: "kubernetes.io/configmap/aafa6785-2823-4a2c-919d-b0276056337c-signing-cabundle") pod "service-ca-9c57cc56f-gzx5p" (UID: "aafa6785-2823-4a2c-919d-b0276056337c") : failed to sync configmap cache: timed out waiting for the condition
Jan 26 16:57:03 crc kubenswrapper[4865]: E0126 16:57:03.032843 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c9a81e77-6ca5-431f-96a8-2de21b7a214c-config podName:c9a81e77-6ca5-431f-96a8-2de21b7a214c nodeName:}" failed. No retries permitted until 2026-01-26 16:57:03.532811999 +0000 UTC m=+151.116697586 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/c9a81e77-6ca5-431f-96a8-2de21b7a214c-config") pod "kube-storage-version-migrator-operator-b67b599dd-b2bdh" (UID: "c9a81e77-6ca5-431f-96a8-2de21b7a214c") : failed to sync configmap cache: timed out waiting for the condition
Jan 26 16:57:03 crc kubenswrapper[4865]: E0126 16:57:03.032864 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1a1bd8ec-d313-45c0-9859-880fe45c4342-certs podName:1a1bd8ec-d313-45c0-9859-880fe45c4342 nodeName:}" failed. No retries permitted until 2026-01-26 16:57:03.53285716 +0000 UTC m=+151.116742747 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "certs" (UniqueName: "kubernetes.io/secret/1a1bd8ec-d313-45c0-9859-880fe45c4342-certs") pod "machine-config-server-c26dz" (UID: "1a1bd8ec-d313-45c0-9859-880fe45c4342") : failed to sync secret cache: timed out waiting for the condition
Jan 26 16:57:03 crc kubenswrapper[4865]: E0126 16:57:03.033779 4865 secret.go:188] Couldn't get secret openshift-machine-config-operator/node-bootstrapper-token: failed to sync secret cache: timed out waiting for the condition
Jan 26 16:57:03 crc kubenswrapper[4865]: E0126 16:57:03.033793 4865 secret.go:188] Couldn't get secret openshift-kube-storage-version-migrator-operator/serving-cert: failed to sync secret cache: timed out waiting for the condition
Jan 26 16:57:03 crc kubenswrapper[4865]: E0126 16:57:03.033934 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1a1bd8ec-d313-45c0-9859-880fe45c4342-node-bootstrap-token podName:1a1bd8ec-d313-45c0-9859-880fe45c4342 nodeName:}" failed. No retries permitted until 2026-01-26 16:57:03.53391131 +0000 UTC m=+151.117797147 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "node-bootstrap-token" (UniqueName: "kubernetes.io/secret/1a1bd8ec-d313-45c0-9859-880fe45c4342-node-bootstrap-token") pod "machine-config-server-c26dz" (UID: "1a1bd8ec-d313-45c0-9859-880fe45c4342") : failed to sync secret cache: timed out waiting for the condition
Jan 26 16:57:03 crc kubenswrapper[4865]: E0126 16:57:03.034068 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c9a81e77-6ca5-431f-96a8-2de21b7a214c-serving-cert podName:c9a81e77-6ca5-431f-96a8-2de21b7a214c nodeName:}" failed. No retries permitted until 2026-01-26 16:57:03.534048364 +0000 UTC m=+151.117933951 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/c9a81e77-6ca5-431f-96a8-2de21b7a214c-serving-cert") pod "kube-storage-version-migrator-operator-b67b599dd-b2bdh" (UID: "c9a81e77-6ca5-431f-96a8-2de21b7a214c") : failed to sync secret cache: timed out waiting for the condition
Jan 26 16:57:03 crc kubenswrapper[4865]: E0126 16:57:03.036014 4865 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/catalog-operator-serving-cert: failed to sync secret cache: timed out waiting for the condition
Jan 26 16:57:03 crc kubenswrapper[4865]: E0126 16:57:03.036083 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/44932794-d83d-4520-ab4c-21e5a7652506-srv-cert podName:44932794-d83d-4520-ab4c-21e5a7652506 nodeName:}" failed. No retries permitted until 2026-01-26 16:57:03.536060942 +0000 UTC m=+151.119946589 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "srv-cert" (UniqueName: "kubernetes.io/secret/44932794-d83d-4520-ab4c-21e5a7652506-srv-cert") pod "catalog-operator-68c6474976-kw5g8" (UID: "44932794-d83d-4520-ab4c-21e5a7652506") : failed to sync secret cache: timed out waiting for the condition
Jan 26 16:57:03 crc kubenswrapper[4865]: E0126 16:57:03.036959 4865 secret.go:188] Couldn't get secret openshift-service-ca-operator/serving-cert: failed to sync secret cache: timed out waiting for the condition
Jan 26 16:57:03 crc kubenswrapper[4865]: E0126 16:57:03.037026 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/96e731f1-c61c-4bb3-9a87-d0d2a85ec1e4-serving-cert podName:96e731f1-c61c-4bb3-9a87-d0d2a85ec1e4 nodeName:}" failed. No retries permitted until 2026-01-26 16:57:03.537012709 +0000 UTC m=+151.120898376 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/96e731f1-c61c-4bb3-9a87-d0d2a85ec1e4-serving-cert") pod "service-ca-operator-777779d784-zbkcg" (UID: "96e731f1-c61c-4bb3-9a87-d0d2a85ec1e4") : failed to sync secret cache: timed out waiting for the condition
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.040045 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.049153 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:03 crc kubenswrapper[4865]: E0126 16:57:03.049306 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:03.549288601 +0000 UTC m=+151.133174188 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.049508 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
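The "failed to sync secret cache" and "failed to sync configmap cache: timed out waiting for the condition" errors in this stretch are an ordering artifact rather than missing data: the kubelet mounts a secret or configmap volume only from a freshly synced watch cache for that object, and a MountVolume.SetUp attempt that lands before the matching "Caches populated" line simply fails and is requeued with the usual 500ms backoff. The gate is the standard informer cache-sync wait, sketched below under assumed names (a hypothetical kubeconfig path and one namespace taken from these logs):

    package main

    import (
        "fmt"
        "time"

        "k8s.io/client-go/informers"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/tools/cache"
        "k8s.io/client-go/tools/clientcmd"
    )

    func main() {
        cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig") // hypothetical path
        if err != nil {
            panic(err)
        }
        client := kubernetes.NewForConfigOrDie(cfg)

        stop := make(chan struct{})
        defer close(stop)

        // Watch ConfigMaps in one namespace and block until the initial LIST
        // completes -- the moment a kubelet would log "Caches populated".
        factory := informers.NewSharedInformerFactoryWithOptions(
            client, 10*time.Minute, informers.WithNamespace("openshift-service-ca-operator"))
        cmInformer := factory.Core().V1().ConfigMaps().Informer()
        factory.Start(stop)

        if !cache.WaitForCacheSync(stop, cmInformer.HasSynced) {
            fmt.Println("timed out waiting for the condition") // the failure mode seen above
            return
        }
        fmt.Println("configmap cache synced; volume SetUp can proceed")
    }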
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.059798 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.080060 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.099920 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.121531 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Jan 26 16:57:03 crc kubenswrapper[4865]: E0126 16:57:03.135297 4865 secret.go:188] Couldn't get secret openshift-ingress-canary/canary-serving-cert: failed to sync secret cache: timed out waiting for the condition Jan 26 16:57:03 crc kubenswrapper[4865]: E0126 16:57:03.135597 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/477ea589-1909-4668-ab2a-585589a4c8b6-cert podName:477ea589-1909-4668-ab2a-585589a4c8b6 nodeName:}" failed. No retries permitted until 2026-01-26 16:57:03.635573217 +0000 UTC m=+151.219458804 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/477ea589-1909-4668-ab2a-585589a4c8b6-cert") pod "ingress-canary-hn6rt" (UID: "477ea589-1909-4668-ab2a-585589a4c8b6") : failed to sync secret cache: timed out waiting for the condition Jan 26 16:57:03 crc kubenswrapper[4865]: E0126 16:57:03.135369 4865 secret.go:188] Couldn't get secret openshift-dns/dns-default-metrics-tls: failed to sync secret cache: timed out waiting for the condition Jan 26 16:57:03 crc kubenswrapper[4865]: E0126 16:57:03.135783 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/40010721-d20f-43ea-966f-5a4a41d60a20-metrics-tls podName:40010721-d20f-43ea-966f-5a4a41d60a20 nodeName:}" failed. No retries permitted until 2026-01-26 16:57:03.635773483 +0000 UTC m=+151.219659070 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-tls" (UniqueName: "kubernetes.io/secret/40010721-d20f-43ea-966f-5a4a41d60a20-metrics-tls") pod "dns-default-gtdlb" (UID: "40010721-d20f-43ea-966f-5a4a41d60a20") : failed to sync secret cache: timed out waiting for the condition Jan 26 16:57:03 crc kubenswrapper[4865]: E0126 16:57:03.135428 4865 configmap.go:193] Couldn't get configMap openshift-dns/dns-default: failed to sync configmap cache: timed out waiting for the condition Jan 26 16:57:03 crc kubenswrapper[4865]: E0126 16:57:03.135943 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/40010721-d20f-43ea-966f-5a4a41d60a20-config-volume podName:40010721-d20f-43ea-966f-5a4a41d60a20 nodeName:}" failed. No retries permitted until 2026-01-26 16:57:03.635935157 +0000 UTC m=+151.219820744 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config-volume" (UniqueName: "kubernetes.io/configmap/40010721-d20f-43ea-966f-5a4a41d60a20-config-volume") pod "dns-default-gtdlb" (UID: "40010721-d20f-43ea-966f-5a4a41d60a20") : failed to sync configmap cache: timed out waiting for the condition Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.140953 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.150601 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:57:03 crc kubenswrapper[4865]: E0126 16:57:03.150865 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:03.650847965 +0000 UTC m=+151.234733552 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.151063 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:03 crc kubenswrapper[4865]: E0126 16:57:03.151426 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:03.651407441 +0000 UTC m=+151.235293028 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.160311 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.179557 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.200387 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.220893 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.240549 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.252320 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:03 crc kubenswrapper[4865]: E0126 16:57:03.252509 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:03.752483171 +0000 UTC m=+151.336368768 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.252697 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:03 crc kubenswrapper[4865]: E0126 16:57:03.253196 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:03.753184951 +0000 UTC m=+151.337070548 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.259901 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.280105 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.306526 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.319416 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.384539 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:03 crc kubenswrapper[4865]: E0126 16:57:03.384723 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:03.884683761 +0000 UTC m=+151.468569348 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.385913 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.386271 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.386784 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:03 crc kubenswrapper[4865]: E0126 16:57:03.387416 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:03.887388009 +0000 UTC m=+151.471273606 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.405565 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8g5d6\" (UniqueName: \"kubernetes.io/projected/6ac1ce1b-d6b5-41b4-9a6e-000553e6b5df-kube-api-access-8g5d6\") pod \"authentication-operator-69f744f599-djm26\" (UID: \"6ac1ce1b-d6b5-41b4-9a6e-000553e6b5df\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-djm26"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.416522 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-npcbp\" (UniqueName: \"kubernetes.io/projected/4bd100a4-d04b-44f7-83fb-3a94f0fe90ae-kube-api-access-npcbp\") pod \"machine-api-operator-5694c8668f-4bktv\" (UID: \"4bd100a4-d04b-44f7-83fb-3a94f0fe90ae\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-4bktv"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.434912 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f647n\" (UniqueName: \"kubernetes.io/projected/e6599bcb-4a91-4fdd-9101-877df6425122-kube-api-access-f647n\") pod \"openshift-apiserver-operator-796bbdcf4f-6sxkt\" (UID: \"e6599bcb-4a91-4fdd-9101-877df6425122\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6sxkt"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.440374 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.460683 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.471683 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-djm26"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.479716 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.488353 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:03 crc kubenswrapper[4865]: E0126 16:57:03.488563 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:03.988533893 +0000 UTC m=+151.572419480 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
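The repeated "driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers" failures above occur because the hostpath CSI driver pod (csi-hostpathplugin-wx8d6, itself still waiting for a sandbox later in this log) has not yet registered with kubelet over the plugin-registration socket, so no CSI client can be built for the volume. A minimal Go sketch of this kind of registry lookup follows; the names (driverRegistry, Lookup) are illustrative, not kubelet's actual internals.

package main

import (
	"fmt"
	"sync"
)

// driverRegistry is a toy stand-in for kubelet's table of registered
// CSI drivers, keyed by driver name.
type driverRegistry struct {
	mu      sync.RWMutex
	drivers map[string]string // driver name -> plugin endpoint
}

// Lookup returns the endpoint for a registered driver, or an error
// mirroring the log text when the driver has not registered yet.
func (r *driverRegistry) Lookup(name string) (string, error) {
	r.mu.RLock()
	defer r.mu.RUnlock()
	endpoint, ok := r.drivers[name]
	if !ok {
		return "", fmt.Errorf("driver name %s not found in the list of registered CSI drivers", name)
	}
	return endpoint, nil
}

func main() {
	reg := &driverRegistry{drivers: map[string]string{}}
	// Fails until the driver pod comes up and registers itself.
	if _, err := reg.Lookup("kubevirt.io.hostpath-provisioner"); err != nil {
		fmt.Println("Error:", err)
	}
}

Once the driver registers, the same lookup succeeds and the pending mount/unmount operations below stop erroring on their next retry.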
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.488803 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:03 crc kubenswrapper[4865]: E0126 16:57:03.489214 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:03.989199342 +0000 UTC m=+151.573085019 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.495748 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6sxkt"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.500435 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.520357 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.540243 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.561017 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.580712 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.590351 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.590919 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/1a1bd8ec-d313-45c0-9859-880fe45c4342-certs\") pod \"machine-config-server-c26dz\" (UID: \"1a1bd8ec-d313-45c0-9859-880fe45c4342\") " pod="openshift-machine-config-operator/machine-config-server-c26dz"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.590973 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c9a81e77-6ca5-431f-96a8-2de21b7a214c-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-b2bdh\" (UID: \"c9a81e77-6ca5-431f-96a8-2de21b7a214c\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-b2bdh"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.591032 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/96e731f1-c61c-4bb3-9a87-d0d2a85ec1e4-serving-cert\") pod \"service-ca-operator-777779d784-zbkcg\" (UID: \"96e731f1-c61c-4bb3-9a87-d0d2a85ec1e4\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-zbkcg"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.591051 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c9a81e77-6ca5-431f-96a8-2de21b7a214c-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-b2bdh\" (UID: \"c9a81e77-6ca5-431f-96a8-2de21b7a214c\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-b2bdh"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.591102 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/aafa6785-2823-4a2c-919d-b0276056337c-signing-cabundle\") pod \"service-ca-9c57cc56f-gzx5p\" (UID: \"aafa6785-2823-4a2c-919d-b0276056337c\") " pod="openshift-service-ca/service-ca-9c57cc56f-gzx5p"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.591147 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/44932794-d83d-4520-ab4c-21e5a7652506-srv-cert\") pod \"catalog-operator-68c6474976-kw5g8\" (UID: \"44932794-d83d-4520-ab4c-21e5a7652506\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kw5g8"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.591191 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/1a1bd8ec-d313-45c0-9859-880fe45c4342-node-bootstrap-token\") pod \"machine-config-server-c26dz\" (UID: \"1a1bd8ec-d313-45c0-9859-880fe45c4342\") " pod="openshift-machine-config-operator/machine-config-server-c26dz"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.591281 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/96e731f1-c61c-4bb3-9a87-d0d2a85ec1e4-config\") pod \"service-ca-operator-777779d784-zbkcg\" (UID: \"96e731f1-c61c-4bb3-9a87-d0d2a85ec1e4\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-zbkcg"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.591309 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/aafa6785-2823-4a2c-919d-b0276056337c-signing-key\") pod \"service-ca-9c57cc56f-gzx5p\" (UID: \"aafa6785-2823-4a2c-919d-b0276056337c\") " pod="openshift-service-ca/service-ca-9c57cc56f-gzx5p"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.591344 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ce5bf85d-dd65-4174-8fdc-10988986d665-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-phb7b\" (UID: \"ce5bf85d-dd65-4174-8fdc-10988986d665\") " pod="openshift-marketplace/marketplace-operator-79b997595-phb7b"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.591479 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/ce5bf85d-dd65-4174-8fdc-10988986d665-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-phb7b\" (UID: \"ce5bf85d-dd65-4174-8fdc-10988986d665\") " pod="openshift-marketplace/marketplace-operator-79b997595-phb7b"
Jan 26 16:57:03 crc kubenswrapper[4865]: E0126 16:57:03.596250 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:04.096216372 +0000 UTC m=+151.680101979 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.597149 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/1a1bd8ec-d313-45c0-9859-880fe45c4342-certs\") pod \"machine-config-server-c26dz\" (UID: \"1a1bd8ec-d313-45c0-9859-880fe45c4342\") " pod="openshift-machine-config-operator/machine-config-server-c26dz"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.597716 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c9a81e77-6ca5-431f-96a8-2de21b7a214c-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-b2bdh\" (UID: \"c9a81e77-6ca5-431f-96a8-2de21b7a214c\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-b2bdh"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.598025 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/aafa6785-2823-4a2c-919d-b0276056337c-signing-key\") pod \"service-ca-9c57cc56f-gzx5p\" (UID: \"aafa6785-2823-4a2c-919d-b0276056337c\") " pod="openshift-service-ca/service-ca-9c57cc56f-gzx5p"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.598055 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/ce5bf85d-dd65-4174-8fdc-10988986d665-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-phb7b\" (UID: \"ce5bf85d-dd65-4174-8fdc-10988986d665\") " pod="openshift-marketplace/marketplace-operator-79b997595-phb7b"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.598373 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/aafa6785-2823-4a2c-919d-b0276056337c-signing-cabundle\") pod \"service-ca-9c57cc56f-gzx5p\" (UID: \"aafa6785-2823-4a2c-919d-b0276056337c\") " pod="openshift-service-ca/service-ca-9c57cc56f-gzx5p"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.599115 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/96e731f1-c61c-4bb3-9a87-d0d2a85ec1e4-config\") pod \"service-ca-operator-777779d784-zbkcg\" (UID: \"96e731f1-c61c-4bb3-9a87-d0d2a85ec1e4\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-zbkcg"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.599135 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c9a81e77-6ca5-431f-96a8-2de21b7a214c-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-b2bdh\" (UID: \"c9a81e77-6ca5-431f-96a8-2de21b7a214c\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-b2bdh"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.601357 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.602112 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/44932794-d83d-4520-ab4c-21e5a7652506-srv-cert\") pod \"catalog-operator-68c6474976-kw5g8\" (UID: \"44932794-d83d-4520-ab4c-21e5a7652506\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kw5g8"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.602295 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/96e731f1-c61c-4bb3-9a87-d0d2a85ec1e4-serving-cert\") pod \"service-ca-operator-777779d784-zbkcg\" (UID: \"96e731f1-c61c-4bb3-9a87-d0d2a85ec1e4\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-zbkcg"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.603266 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/1a1bd8ec-d313-45c0-9859-880fe45c4342-node-bootstrap-token\") pod \"machine-config-server-c26dz\" (UID: \"1a1bd8ec-d313-45c0-9859-880fe45c4342\") " pod="openshift-machine-config-operator/machine-config-server-c26dz"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.605210 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ce5bf85d-dd65-4174-8fdc-10988986d665-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-phb7b\" (UID: \"ce5bf85d-dd65-4174-8fdc-10988986d665\") " pod="openshift-marketplace/marketplace-operator-79b997595-phb7b"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.619873 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.642887 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.679553 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wcrms\" (UniqueName: \"kubernetes.io/projected/3452d3f1-d080-412d-a1da-0dc0d3776d5f-kube-api-access-wcrms\") pod \"controller-manager-879f6c89f-nknkn\" (UID: \"3452d3f1-d080-412d-a1da-0dc0d3776d5f\") " pod="openshift-controller-manager/controller-manager-879f6c89f-nknkn"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.693016 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/40010721-d20f-43ea-966f-5a4a41d60a20-config-volume\") pod \"dns-default-gtdlb\" (UID: \"40010721-d20f-43ea-966f-5a4a41d60a20\") " pod="openshift-dns/dns-default-gtdlb"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.693108 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.693220 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/477ea589-1909-4668-ab2a-585589a4c8b6-cert\") pod \"ingress-canary-hn6rt\" (UID: \"477ea589-1909-4668-ab2a-585589a4c8b6\") " pod="openshift-ingress-canary/ingress-canary-hn6rt"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.693274 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/40010721-d20f-43ea-966f-5a4a41d60a20-metrics-tls\") pod \"dns-default-gtdlb\" (UID: \"40010721-d20f-43ea-966f-5a4a41d60a20\") " pod="openshift-dns/dns-default-gtdlb"
Jan 26 16:57:03 crc kubenswrapper[4865]: E0126 16:57:03.694140 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:04.194126691 +0000 UTC m=+151.778012278 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.694221 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/41f9cfdd-4a56-4e9a-ac7c-8b683e96d625-bound-sa-token\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.695366 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/40010721-d20f-43ea-966f-5a4a41d60a20-config-volume\") pod \"dns-default-gtdlb\" (UID: \"40010721-d20f-43ea-966f-5a4a41d60a20\") " pod="openshift-dns/dns-default-gtdlb"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.697329 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/40010721-d20f-43ea-966f-5a4a41d60a20-metrics-tls\") pod \"dns-default-gtdlb\" (UID: \"40010721-d20f-43ea-966f-5a4a41d60a20\") " pod="openshift-dns/dns-default-gtdlb"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.700754 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/477ea589-1909-4668-ab2a-585589a4c8b6-cert\") pod \"ingress-canary-hn6rt\" (UID: \"477ea589-1909-4668-ab2a-585589a4c8b6\") " pod="openshift-ingress-canary/ingress-canary-hn6rt"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.714307 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-4bktv"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.723868 4865 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.724923 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p8lww\" (UniqueName: \"kubernetes.io/projected/41f9cfdd-4a56-4e9a-ac7c-8b683e96d625-kube-api-access-p8lww\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.735133 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-djm26"]
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.736159 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6sxkt"]
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.741220 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt"
Jan 26 16:57:03 crc kubenswrapper[4865]: W0126 16:57:03.745326 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6ac1ce1b_d6b5_41b4_9a6e_000553e6b5df.slice/crio-aa0b44c25125ca301031d4eb33b4e4e817cbf94bf47ae036cc8ac42027e0799c WatchSource:0}: Error finding container aa0b44c25125ca301031d4eb33b4e4e817cbf94bf47ae036cc8ac42027e0799c: Status 404 returned error can't find the container with id aa0b44c25125ca301031d4eb33b4e4e817cbf94bf47ae036cc8ac42027e0799c
Jan 26 16:57:03 crc kubenswrapper[4865]: W0126 16:57:03.746752 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode6599bcb_4a91_4fdd_9101_877df6425122.slice/crio-4f28fcf2d7cfe502518e6d086a310b7b3de61fa2cd6eba6f7a32ec43753e4afe WatchSource:0}: Error finding container 4f28fcf2d7cfe502518e6d086a310b7b3de61fa2cd6eba6f7a32ec43753e4afe: Status 404 returned error can't find the container with id 4f28fcf2d7cfe502518e6d086a310b7b3de61fa2cd6eba6f7a32ec43753e4afe
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.758446 4865 request.go:700] Waited for 1.843209152s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/hostpath-provisioner/configmaps?fieldSelector=metadata.name%3Dkube-root-ca.crt&limit=500&resourceVersion=0
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.760534 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.795165 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.795360 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b8vrh\" (UniqueName: \"kubernetes.io/projected/e225c1fe-ceb6-47c2-9721-cfaf39515364-kube-api-access-b8vrh\") pod \"console-operator-58897d9998-fl8hv\" (UID: \"e225c1fe-ceb6-47c2-9721-cfaf39515364\") " pod="openshift-console-operator/console-operator-58897d9998-fl8hv"
Jan 26 16:57:03 crc kubenswrapper[4865]: E0126 16:57:03.795411 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:04.295376816 +0000 UTC m=+151.879262403 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.796315 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:03 crc kubenswrapper[4865]: E0126 16:57:03.796738 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:04.296721975 +0000 UTC m=+151.880607572 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.811861 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-nknkn"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.817680 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6pq68\" (UniqueName: \"kubernetes.io/projected/8731191c-a1c3-4422-bcac-e6e6cfec649f-kube-api-access-6pq68\") pod \"route-controller-manager-6576b87f9c-6xswb\" (UID: \"8731191c-a1c3-4422-bcac-e6e6cfec649f\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6xswb"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.839886 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6xswb"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.841733 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6xg97\" (UniqueName: \"kubernetes.io/projected/82bc8cef-2bec-4d01-a1a1-fd9c19c830a2-kube-api-access-6xg97\") pod \"ingress-operator-5b745b69d9-7kt64\" (UID: \"82bc8cef-2bec-4d01-a1a1-fd9c19c830a2\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7kt64"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.856544 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-676w5\" (UniqueName: \"kubernetes.io/projected/dcb838a3-bd74-4860-a1e7-c9bacf4334ca-kube-api-access-676w5\") pod \"cluster-image-registry-operator-dc59b4c8b-mjd22\" (UID: \"dcb838a3-bd74-4860-a1e7-c9bacf4334ca\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mjd22"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.878952 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lskq5\" (UniqueName: \"kubernetes.io/projected/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-kube-api-access-lskq5\") pod \"oauth-openshift-558db77b4-npnhh\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " pod="openshift-authentication/oauth-openshift-558db77b4-npnhh"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.896866 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8b7ls\" (UniqueName: \"kubernetes.io/projected/9c1dc714-6c88-4db6-ac20-54b9771956a3-kube-api-access-8b7ls\") pod \"cluster-samples-operator-665b6dd947-4b7rk\" (UID: \"9c1dc714-6c88-4db6-ac20-54b9771956a3\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4b7rk"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.899203 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:03 crc kubenswrapper[4865]: E0126 16:57:03.899490 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:04.399439482 +0000 UTC m=+151.983325079 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
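Each failed volume operation above is parked rather than retried immediately: the nestedpendingoperations lines record "No retries permitted until <t> (durationBeforeRetry 500ms)", i.e. the operation may only be re-attempted once the backoff window has elapsed. A toy Go sketch of such a time-gated retry check follows; it models only the gate, not kubelet's actual nestedpendingoperations API.

package main

import (
	"fmt"
	"time"
)

// backoff tracks when an operation last failed and how long to wait
// before permitting another attempt (kubelet doubles this duration on
// repeated failures up to a cap; here it stays fixed for simplicity).
type backoff struct {
	lastErrorTime time.Time
	duration      time.Duration
}

// retryAllowed reports whether enough time has passed since the last
// failure, mirroring the "No retries permitted until ..." decision.
func (b *backoff) retryAllowed(now time.Time) bool {
	return now.After(b.lastErrorTime.Add(b.duration))
}

func main() {
	b := &backoff{lastErrorTime: time.Now(), duration: 500 * time.Millisecond}
	fmt.Println("retry immediately?", b.retryAllowed(time.Now()))                  // false: still inside the window
	fmt.Println("retry after 1s?", b.retryAllowed(time.Now().Add(time.Second)))   // true: window has elapsed
}

This is why the same mount/unmount errors recur at roughly half-second intervals in the log: the reconciler re-queues the operation each time the gate opens, and it keeps failing until the driver registers.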
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.918078 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bj4m8\" (UniqueName: \"kubernetes.io/projected/44932794-d83d-4520-ab4c-21e5a7652506-kube-api-access-bj4m8\") pod \"catalog-operator-68c6474976-kw5g8\" (UID: \"44932794-d83d-4520-ab4c-21e5a7652506\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kw5g8"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.922824 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-4bktv"]
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.937773 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-klz22\" (UniqueName: \"kubernetes.io/projected/723c5d82-43eb-4833-b1d3-ad5236350fcc-kube-api-access-klz22\") pod \"openshift-config-operator-7777fb866f-tndns\" (UID: \"723c5d82-43eb-4833-b1d3-ad5236350fcc\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-tndns"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.961937 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dkvc6\" (UniqueName: \"kubernetes.io/projected/092200be-8013-4057-b586-d321c27dc3fb-kube-api-access-dkvc6\") pod \"packageserver-d55dfcdfc-l6hpd\" (UID: \"092200be-8013-4057-b586-d321c27dc3fb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-l6hpd"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.977589 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-djb9j\" (UniqueName: \"kubernetes.io/projected/2918b19b-b96c-4b24-988d-f486d9cee307-kube-api-access-djb9j\") pod \"package-server-manager-789f6589d5-j2f55\" (UID: \"2918b19b-b96c-4b24-988d-f486d9cee307\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j2f55"
Jan 26 16:57:03 crc kubenswrapper[4865]: I0126 16:57:03.993657 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-tndns"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.001140 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:04 crc kubenswrapper[4865]: E0126 16:57:04.001744 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:04.501711146 +0000 UTC m=+152.085596733 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.016943 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ltnmr\" (UniqueName: \"kubernetes.io/projected/3862707b-bcae-4ad0-b309-fa075dc82f70-kube-api-access-ltnmr\") pod \"collect-profiles-29490765-lx6b6\" (UID: \"3862707b-bcae-4ad0-b309-fa075dc82f70\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490765-lx6b6"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.025906 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-fl8hv"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.036566 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fllt7\" (UniqueName: \"kubernetes.io/projected/c62c3a20-356b-487a-80b8-aaea5650712b-kube-api-access-fllt7\") pod \"apiserver-7bbb656c7d-zkwx7\" (UID: \"c62c3a20-356b-487a-80b8-aaea5650712b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zkwx7"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.039697 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zkwx7"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.065942 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9r5n\" (UniqueName: \"kubernetes.io/projected/08899e06-b9e4-4fda-b49b-879f8789ea40-kube-api-access-w9r5n\") pod \"etcd-operator-b45778765-68xvn\" (UID: \"08899e06-b9e4-4fda-b49b-879f8789ea40\") " pod="openshift-etcd-operator/etcd-operator-b45778765-68xvn"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.095636 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-68xvn"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.097237 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-nknkn"]
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.098755 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/dcb838a3-bd74-4860-a1e7-c9bacf4334ca-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-mjd22\" (UID: \"dcb838a3-bd74-4860-a1e7-c9bacf4334ca\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mjd22"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.101920 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:04 crc kubenswrapper[4865]: E0126 16:57:04.102572 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:04.602539149 +0000 UTC m=+152.186424736 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.114970 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c9k2g\" (UniqueName: \"kubernetes.io/projected/c9a81e77-6ca5-431f-96a8-2de21b7a214c-kube-api-access-c9k2g\") pod \"kube-storage-version-migrator-operator-b67b599dd-b2bdh\" (UID: \"c9a81e77-6ca5-431f-96a8-2de21b7a214c\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-b2bdh"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.121852 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kcd2q\" (UniqueName: \"kubernetes.io/projected/96e731f1-c61c-4bb3-9a87-d0d2a85ec1e4-kube-api-access-kcd2q\") pod \"service-ca-operator-777779d784-zbkcg\" (UID: \"96e731f1-c61c-4bb3-9a87-d0d2a85ec1e4\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-zbkcg"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.122384 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4b7rk"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.124478 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-l6hpd"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.128011 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-npnhh"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.146015 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mjjjb\" (UniqueName: \"kubernetes.io/projected/55e71027-c397-4e49-adef-6f8d1c7d78d2-kube-api-access-mjjjb\") pod \"machine-config-controller-84d6567774-z8s8l\" (UID: \"55e71027-c397-4e49-adef-6f8d1c7d78d2\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-z8s8l"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.146194 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j2f55"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.153552 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6xswb"]
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.167314 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-96p5c\" (UniqueName: \"kubernetes.io/projected/f8fac562-f66c-433b-9e4a-1a08fe6d78f5-kube-api-access-96p5c\") pod \"console-f9d7485db-dsngx\" (UID: \"f8fac562-f66c-433b-9e4a-1a08fe6d78f5\") " pod="openshift-console/console-f9d7485db-dsngx"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.176735 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wkrz7\" (UniqueName: \"kubernetes.io/projected/9156ce7a-0206-4509-b090-d2e93c83f425-kube-api-access-wkrz7\") pod \"downloads-7954f5f757-7pntj\" (UID: \"9156ce7a-0206-4509-b090-d2e93c83f425\") " pod="openshift-console/downloads-7954f5f757-7pntj"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.184487 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-zbkcg"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.192550 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29490765-lx6b6"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.197601 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kf27h\" (UniqueName: \"kubernetes.io/projected/5418254d-28ed-4c8e-b0d5-70ac0fe883ee-kube-api-access-kf27h\") pod \"machine-config-operator-74547568cd-shl5n\" (UID: \"5418254d-28ed-4c8e-b0d5-70ac0fe883ee\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-shl5n"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.198777 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-b2bdh"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.203565 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:04 crc kubenswrapper[4865]: E0126 16:57:04.205562 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:04.705541584 +0000 UTC m=+152.289427171 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.206741 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kw5g8"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.216206 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-djm26" event={"ID":"6ac1ce1b-d6b5-41b4-9a6e-000553e6b5df","Type":"ContainerStarted","Data":"aa0b44c25125ca301031d4eb33b4e4e817cbf94bf47ae036cc8ac42027e0799c"}
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.216457 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8ae57544-4ad2-46e2-b205-3a770abfc47f-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-mxn5w\" (UID: \"8ae57544-4ad2-46e2-b205-3a770abfc47f\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mxn5w"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.217501 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6sxkt" event={"ID":"e6599bcb-4a91-4fdd-9101-877df6425122","Type":"ContainerStarted","Data":"4f28fcf2d7cfe502518e6d086a310b7b3de61fa2cd6eba6f7a32ec43753e4afe"}
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.217586 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-tndns"]
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.218369 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-nknkn" event={"ID":"3452d3f1-d080-412d-a1da-0dc0d3776d5f","Type":"ContainerStarted","Data":"84df4ea757b9d176cc74612c702effa97b11248e7d73714c966a9c1228ca50c5"}
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.219238 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-4bktv" event={"ID":"4bd100a4-d04b-44f7-83fb-3a94f0fe90ae","Type":"ContainerStarted","Data":"8da1b3e655b7544927d5a3f23a1230943a53e5874c3b2132924b951c8042772d"}
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.230469 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mjd22"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.234853 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bjxrx\" (UniqueName: \"kubernetes.io/projected/d3d19ca3-d0e7-4ef8-9112-c66a479404d2-kube-api-access-bjxrx\") pod \"dns-operator-744455d44c-mfq2t\" (UID: \"d3d19ca3-d0e7-4ef8-9112-c66a479404d2\") " pod="openshift-dns-operator/dns-operator-744455d44c-mfq2t"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.254790 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b7cf8ff2-589a-4f1a-aa4a-0afe351bdd3b-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-vzw8s\" (UID: \"b7cf8ff2-589a-4f1a-aa4a-0afe351bdd3b\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vzw8s"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.261963 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-7pntj"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.270440 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-zkwx7"]
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.274314 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/82bc8cef-2bec-4d01-a1a1-fd9c19c830a2-bound-sa-token\") pod \"ingress-operator-5b745b69d9-7kt64\" (UID: \"82bc8cef-2bec-4d01-a1a1-fd9c19c830a2\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7kt64"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.294830 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-59d2n\" (UniqueName: \"kubernetes.io/projected/0af9b5a1-9cca-41dc-9a23-823a440db8c1-kube-api-access-59d2n\") pod \"control-plane-machine-set-operator-78cbb6b69f-kvvch\" (UID: \"0af9b5a1-9cca-41dc-9a23-823a440db8c1\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kvvch"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.303768 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-dsngx"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.304437 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:04 crc kubenswrapper[4865]: E0126 16:57:04.304589 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:04.804562005 +0000 UTC m=+152.388447592 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.304768 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:04 crc kubenswrapper[4865]: E0126 16:57:04.305141 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:04.805134011 +0000 UTC m=+152.389019598 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.350963 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-mfq2t"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.355908 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s7978\" (UniqueName: \"kubernetes.io/projected/d6c003d6-66db-4a16-86a2-ba0d03cf1a25-kube-api-access-s7978\") pod \"apiserver-76f77b778f-6lhmn\" (UID: \"d6c003d6-66db-4a16-86a2-ba0d03cf1a25\") " pod="openshift-apiserver/apiserver-76f77b778f-6lhmn"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.356098 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7kt64"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.366324 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5c5r\" (UniqueName: \"kubernetes.io/projected/ce5bf85d-dd65-4174-8fdc-10988986d665-kube-api-access-l5c5r\") pod \"marketplace-operator-79b997595-phb7b\" (UID: \"ce5bf85d-dd65-4174-8fdc-10988986d665\") " pod="openshift-marketplace/marketplace-operator-79b997595-phb7b"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.367634 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-shl5n"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.382089 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kmtnb\" (UniqueName: \"kubernetes.io/projected/e3cf8ef1-8f1a-4688-8443-600830a81400-kube-api-access-kmtnb\") pod \"multus-admission-controller-857f4d67dd-vsrcj\" (UID: \"e3cf8ef1-8f1a-4688-8443-600830a81400\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-vsrcj"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.395879 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-smgv8\" (UniqueName: \"kubernetes.io/projected/1a1bd8ec-d313-45c0-9859-880fe45c4342-kube-api-access-smgv8\") pod \"machine-config-server-c26dz\" (UID: \"1a1bd8ec-d313-45c0-9859-880fe45c4342\") " pod="openshift-machine-config-operator/machine-config-server-c26dz"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.399601 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vzw8s"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.405595 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:04 crc kubenswrapper[4865]: E0126 16:57:04.405827 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:04.90580207 +0000 UTC m=+152.489687657 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.405936 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mxn5w"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.412780 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-z8s8l"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.418425 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8g5v2\" (UniqueName: \"kubernetes.io/projected/9fc43392-6eaa-4e4c-bffb-42bdbc556d4e-kube-api-access-8g5v2\") pod \"openshift-controller-manager-operator-756b6f6bc6-mdgf6\" (UID: \"9fc43392-6eaa-4e4c-bffb-42bdbc556d4e\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mdgf6"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.424868 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-fl8hv"]
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.429812 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sghbt\" (UniqueName: \"kubernetes.io/projected/d63ddf25-4c00-4aec-b51a-5c80fe2a5810-kube-api-access-sghbt\") pod \"machine-approver-56656f9798-mq42v\" (UID: \"d63ddf25-4c00-4aec-b51a-5c80fe2a5810\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mq42v"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.432948 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7cd346f7-8914-4089-8e15-89085f8340d2-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-qd267\" (UID: \"7cd346f7-8914-4089-8e15-89085f8340d2\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qd267"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.443111 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6pqjw\" (UniqueName: \"kubernetes.io/projected/c45cd232-7b33-4314-8060-842c5b6ccb6c-kube-api-access-6pqjw\") pod \"olm-operator-6b444d44fb-g66fj\" (UID: \"c45cd232-7b33-4314-8060-842c5b6ccb6c\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-g66fj"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.456223 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qd267"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.462696 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b727c\" (UniqueName: \"kubernetes.io/projected/0e850ae3-1746-4f97-9314-e6add8fb4fed-kube-api-access-b727c\") pod \"migrator-59844c95c7-5mz4s\" (UID: \"0e850ae3-1746-4f97-9314-e6add8fb4fed\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-5mz4s"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.464942 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-g66fj"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.473244 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kvvch"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.474912 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7v476\" (UniqueName: \"kubernetes.io/projected/a5eb1e8d-ffa8-4422-aa5d-852074436139-kube-api-access-7v476\") pod \"router-default-5444994796-gx9t9\" (UID: \"a5eb1e8d-ffa8-4422-aa5d-852074436139\") " pod="openshift-ingress/router-default-5444994796-gx9t9"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.488082 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mdgf6"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.495025 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-6lhmn"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.495692 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7vdfv\" (UniqueName: \"kubernetes.io/projected/aafa6785-2823-4a2c-919d-b0276056337c-kube-api-access-7vdfv\") pod \"service-ca-9c57cc56f-gzx5p\" (UID: \"aafa6785-2823-4a2c-919d-b0276056337c\") " pod="openshift-service-ca/service-ca-9c57cc56f-gzx5p"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.506847 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:04 crc kubenswrapper[4865]: E0126 16:57:04.507169 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:05.007156098 +0000 UTC m=+152.591041685 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.513500 4865 patch_prober.go:28] interesting pod/machine-config-daemon-q8cb9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.513552 4865 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.516242 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-gzx5p"
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-gzx5p" Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.521209 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-phb7b" Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.527744 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-c26dz" Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.536365 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l9xd9\" (UniqueName: \"kubernetes.io/projected/e8e3ebba-3ac5-4cee-8fa8-273a1f25eaf2-kube-api-access-l9xd9\") pod \"csi-hostpathplugin-wx8d6\" (UID: \"e8e3ebba-3ac5-4cee-8fa8-273a1f25eaf2\") " pod="hostpath-provisioner/csi-hostpathplugin-wx8d6" Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.557446 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6bhmr\" (UniqueName: \"kubernetes.io/projected/40010721-d20f-43ea-966f-5a4a41d60a20-kube-api-access-6bhmr\") pod \"dns-default-gtdlb\" (UID: \"40010721-d20f-43ea-966f-5a4a41d60a20\") " pod="openshift-dns/dns-default-gtdlb" Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.562906 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-wx8d6" Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.576320 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-25565\" (UniqueName: \"kubernetes.io/projected/477ea589-1909-4668-ab2a-585589a4c8b6-kube-api-access-25565\") pod \"ingress-canary-hn6rt\" (UID: \"477ea589-1909-4668-ab2a-585589a4c8b6\") " pod="openshift-ingress-canary/ingress-canary-hn6rt" Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.576367 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mq42v" Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.579472 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.607463 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:57:04 crc kubenswrapper[4865]: E0126 16:57:04.609208 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:05.109177465 +0000 UTC m=+152.693063062 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.648495 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-68xvn"] Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.673553 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-gx9t9" Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.680112 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-vsrcj" Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.709409 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:04 crc kubenswrapper[4865]: E0126 16:57:04.709848 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:05.209821922 +0000 UTC m=+152.793707509 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.737130 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-5mz4s" Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.740559 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-npnhh"] Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.810606 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:57:04 crc kubenswrapper[4865]: E0126 16:57:04.810795 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:05.310766548 +0000 UTC m=+152.894652125 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.810959 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:04 crc kubenswrapper[4865]: E0126 16:57:04.811339 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:05.311322954 +0000 UTC m=+152.895208541 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.840141 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-hn6rt" Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.844569 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-gtdlb" Jan 26 16:57:04 crc kubenswrapper[4865]: W0126 16:57:04.868789 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod08899e06_b9e4_4fda_b49b_879f8789ea40.slice/crio-8782181cdbcdc17832c36c5255e505e02a24eb7a4198ccfc878a02facf6462a4 WatchSource:0}: Error finding container 8782181cdbcdc17832c36c5255e505e02a24eb7a4198ccfc878a02facf6462a4: Status 404 returned error can't find the container with id 8782181cdbcdc17832c36c5255e505e02a24eb7a4198ccfc878a02facf6462a4 Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.911452 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:57:04 crc kubenswrapper[4865]: E0126 16:57:04.911726 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:05.411679713 +0000 UTC m=+152.995565300 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:04 crc kubenswrapper[4865]: I0126 16:57:04.920766 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:04 crc kubenswrapper[4865]: E0126 16:57:04.927300 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:05.427282241 +0000 UTC m=+153.011167818 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:05 crc kubenswrapper[4865]: I0126 16:57:05.036807 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:57:05 crc kubenswrapper[4865]: E0126 16:57:05.037221 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:05.537202475 +0000 UTC m=+153.121088062 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:05 crc kubenswrapper[4865]: I0126 16:57:05.104132 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-b2bdh"] Jan 26 16:57:05 crc kubenswrapper[4865]: I0126 16:57:05.146467 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:05 crc kubenswrapper[4865]: E0126 16:57:05.146837 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:05.64682258 +0000 UTC m=+153.230708167 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:05 crc kubenswrapper[4865]: I0126 16:57:05.268912 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:57:05 crc kubenswrapper[4865]: E0126 16:57:05.269482 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:05.769456948 +0000 UTC m=+153.353342535 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:05 crc kubenswrapper[4865]: I0126 16:57:05.352902 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-4bktv" event={"ID":"4bd100a4-d04b-44f7-83fb-3a94f0fe90ae","Type":"ContainerStarted","Data":"aa06d6ed50be8d3a9c016ed257ed3d2922028f4520d7488542caf6001fee3d91"} Jan 26 16:57:05 crc kubenswrapper[4865]: I0126 16:57:05.378020 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:05 crc kubenswrapper[4865]: E0126 16:57:05.378606 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:05.878592069 +0000 UTC m=+153.462477656 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:05 crc kubenswrapper[4865]: W0126 16:57:05.410059 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda5eb1e8d_ffa8_4422_aa5d_852074436139.slice/crio-b232e680fbcc18c031722fc3ad8500c17f5a791f83ed1158431642bd1dec46ee WatchSource:0}: Error finding container b232e680fbcc18c031722fc3ad8500c17f5a791f83ed1158431642bd1dec46ee: Status 404 returned error can't find the container with id b232e680fbcc18c031722fc3ad8500c17f5a791f83ed1158431642bd1dec46ee Jan 26 16:57:05 crc kubenswrapper[4865]: I0126 16:57:05.459917 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" event={"ID":"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043","Type":"ContainerStarted","Data":"67c87b6a183e6e1197b97ad8bb09cf465ce4f792818320f3a2e61465af857f13"} Jan 26 16:57:05 crc kubenswrapper[4865]: I0126 16:57:05.484596 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:57:05 crc kubenswrapper[4865]: E0126 16:57:05.484963 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:05.984945201 +0000 UTC m=+153.568830788 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:05 crc kubenswrapper[4865]: I0126 16:57:05.553517 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-c26dz" event={"ID":"1a1bd8ec-d313-45c0-9859-880fe45c4342","Type":"ContainerStarted","Data":"e5d58fa99c4170d1c1f484865d4c7b723b481340144c883b460590130d226a63"} Jan 26 16:57:05 crc kubenswrapper[4865]: I0126 16:57:05.555219 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-68xvn" event={"ID":"08899e06-b9e4-4fda-b49b-879f8789ea40","Type":"ContainerStarted","Data":"8782181cdbcdc17832c36c5255e505e02a24eb7a4198ccfc878a02facf6462a4"} Jan 26 16:57:05 crc kubenswrapper[4865]: I0126 16:57:05.558576 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-djm26" event={"ID":"6ac1ce1b-d6b5-41b4-9a6e-000553e6b5df","Type":"ContainerStarted","Data":"caac3fed9d1de4b4f228e3d05e0d1ad89fabde3ffd01120e0b60533f539b67fa"} Jan 26 16:57:05 crc kubenswrapper[4865]: I0126 16:57:05.563323 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-fl8hv" event={"ID":"e225c1fe-ceb6-47c2-9721-cfaf39515364","Type":"ContainerStarted","Data":"d210997136ceabb56328de865179c14a9ca66946ff0074a13f851af4d61f6e8e"} Jan 26 16:57:05 crc kubenswrapper[4865]: I0126 16:57:05.575417 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zkwx7" event={"ID":"c62c3a20-356b-487a-80b8-aaea5650712b","Type":"ContainerStarted","Data":"f43e6e1165ef8d47e657ec2bd4b173426cd942309dfd098c0a0fc5a037fa68f8"} Jan 26 16:57:05 crc kubenswrapper[4865]: I0126 16:57:05.586463 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:05 crc kubenswrapper[4865]: E0126 16:57:05.586826 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:06.086814394 +0000 UTC m=+153.670699981 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:05 crc kubenswrapper[4865]: I0126 16:57:05.589765 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6sxkt" event={"ID":"e6599bcb-4a91-4fdd-9101-877df6425122","Type":"ContainerStarted","Data":"34d433f375c62c6e89b8606bc88614079ae972547200e1899a08ffc19fb120c8"} Jan 26 16:57:05 crc kubenswrapper[4865]: I0126 16:57:05.592367 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-nknkn" event={"ID":"3452d3f1-d080-412d-a1da-0dc0d3776d5f","Type":"ContainerStarted","Data":"68eb6663e8bbb67558745b6d2b8dcfde20b98c9205297d3a9edf34b4e5cee2d5"} Jan 26 16:57:05 crc kubenswrapper[4865]: I0126 16:57:05.596568 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-nknkn" Jan 26 16:57:05 crc kubenswrapper[4865]: I0126 16:57:05.600905 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-tndns" event={"ID":"723c5d82-43eb-4833-b1d3-ad5236350fcc","Type":"ContainerStarted","Data":"843a9ca321067649cc6bb0b70dc1da4c2381d17f75d4713e4309013754895ffc"} Jan 26 16:57:05 crc kubenswrapper[4865]: I0126 16:57:05.607757 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mq42v" event={"ID":"d63ddf25-4c00-4aec-b51a-5c80fe2a5810","Type":"ContainerStarted","Data":"15aa7012a85e56c8b1b97a7d85d453b14272881eec3fb11584442e6a87adb1a9"} Jan 26 16:57:05 crc kubenswrapper[4865]: I0126 16:57:05.609296 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6xswb" event={"ID":"8731191c-a1c3-4422-bcac-e6e6cfec649f","Type":"ContainerStarted","Data":"60509dbd7fe35764fe639c159f5687be95ef5be5b1d76e630aab4e49f76d0ada"} Jan 26 16:57:05 crc kubenswrapper[4865]: I0126 16:57:05.611920 4865 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-nknkn container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Jan 26 16:57:05 crc kubenswrapper[4865]: I0126 16:57:05.611959 4865 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-nknkn" podUID="3452d3f1-d080-412d-a1da-0dc0d3776d5f" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.9:8443/healthz\": dial tcp 10.217.0.9:8443: connect: connection refused" Jan 26 16:57:05 crc kubenswrapper[4865]: I0126 16:57:05.688305 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 
16:57:05 crc kubenswrapper[4865]: E0126 16:57:05.688421 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:06.188394348 +0000 UTC m=+153.772279945 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:05 crc kubenswrapper[4865]: E0126 16:57:05.691322 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:06.191309722 +0000 UTC m=+153.775195309 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:05 crc kubenswrapper[4865]: I0126 16:57:05.693111 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:05 crc kubenswrapper[4865]: I0126 16:57:05.794439 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:57:05 crc kubenswrapper[4865]: E0126 16:57:05.808709 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:06.308668129 +0000 UTC m=+153.892553726 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:05 crc kubenswrapper[4865]: I0126 16:57:05.897681 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:05 crc kubenswrapper[4865]: E0126 16:57:05.898568 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:06.398555448 +0000 UTC m=+153.982441035 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.000148 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:57:06 crc kubenswrapper[4865]: E0126 16:57:06.001028 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:06.500839422 +0000 UTC m=+154.084725029 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.108844 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:06 crc kubenswrapper[4865]: E0126 16:57:06.109329 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:06.609315035 +0000 UTC m=+154.193200622 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.210213 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:57:06 crc kubenswrapper[4865]: E0126 16:57:06.210636 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:06.710618551 +0000 UTC m=+154.294504138 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.312835 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:06 crc kubenswrapper[4865]: E0126 16:57:06.313356 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:06.813338868 +0000 UTC m=+154.397224455 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.368810 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-djm26" podStartSLOduration=123.368792039 podStartE2EDuration="2m3.368792039s" podCreationTimestamp="2026-01-26 16:55:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:57:06.367185863 +0000 UTC m=+153.951071470" watchObservedRunningTime="2026-01-26 16:57:06.368792039 +0000 UTC m=+153.952677626" Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.402381 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-nknkn" podStartSLOduration=122.402360722 podStartE2EDuration="2m2.402360722s" podCreationTimestamp="2026-01-26 16:55:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:57:06.398025188 +0000 UTC m=+153.981910775" watchObservedRunningTime="2026-01-26 16:57:06.402360722 +0000 UTC m=+153.986246309" Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.416812 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-g66fj"] Jan 26 16:57:06 crc kubenswrapper[4865]: E0126 16:57:06.417171 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:06.917134626 +0000 UTC m=+154.501020213 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.417863 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.418296 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:06 crc kubenswrapper[4865]: E0126 16:57:06.426284 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:06.918732482 +0000 UTC m=+154.502618069 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.449112 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-6sxkt" podStartSLOduration=123.449088813 podStartE2EDuration="2m3.449088813s" podCreationTimestamp="2026-01-26 16:55:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:57:06.442298138 +0000 UTC m=+154.026183725" watchObservedRunningTime="2026-01-26 16:57:06.449088813 +0000 UTC m=+154.032974400" Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.486612 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mjd22"] Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.492875 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-mfq2t"] Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.496596 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vzw8s"] Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.519487 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:57:06 crc kubenswrapper[4865]: E0126 16:57:06.519947 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:07.019926565 +0000 UTC m=+154.603812142 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.617457 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mq42v" event={"ID":"d63ddf25-4c00-4aec-b51a-5c80fe2a5810","Type":"ContainerStarted","Data":"c7e3e40cb77c5f832706116e54bd8d94a293f1cd59a190e63653a76132e62c0d"} Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.620607 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-gx9t9" event={"ID":"a5eb1e8d-ffa8-4422-aa5d-852074436139","Type":"ContainerStarted","Data":"0dfdbb35f7b183a1594e88d700f4e07a2ed48c635d2e1627095c0f189e519e6c"} Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.620668 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-gx9t9" event={"ID":"a5eb1e8d-ffa8-4422-aa5d-852074436139","Type":"ContainerStarted","Data":"b232e680fbcc18c031722fc3ad8500c17f5a791f83ed1158431642bd1dec46ee"} Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.621096 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:06 crc kubenswrapper[4865]: E0126 16:57:06.621964 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:07.121536731 +0000 UTC m=+154.705422318 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.624146 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6xswb" event={"ID":"8731191c-a1c3-4422-bcac-e6e6cfec649f","Type":"ContainerStarted","Data":"befd37012960e11b2be875011dec9ed58a1d42a2a8b7d0bb483ec83452c1edfa"} Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.624464 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6xswb" Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.628484 4865 generic.go:334] "Generic (PLEG): container finished" podID="c62c3a20-356b-487a-80b8-aaea5650712b" containerID="7c1bf76e554b186fcda882d461fb98787badbd33de4af98b7eef2da56be8e1ab" exitCode=0 Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.628602 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zkwx7" event={"ID":"c62c3a20-356b-487a-80b8-aaea5650712b","Type":"ContainerDied","Data":"7c1bf76e554b186fcda882d461fb98787badbd33de4af98b7eef2da56be8e1ab"} Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.632846 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-c26dz" event={"ID":"1a1bd8ec-d313-45c0-9859-880fe45c4342","Type":"ContainerStarted","Data":"04044d69b6bde8bbfc03447e8b7e1e69225b72e04b8dc1a9c3272e3ba7604a6d"} Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.636706 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-68xvn" event={"ID":"08899e06-b9e4-4fda-b49b-879f8789ea40","Type":"ContainerStarted","Data":"aff6d0876654597f2e8af5d5c643351e59ea48921ffb302121de69761bfa65ac"} Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.641329 4865 generic.go:334] "Generic (PLEG): container finished" podID="723c5d82-43eb-4833-b1d3-ad5236350fcc" containerID="a49ae7d8311e8850aa71b8ec7459f2c2b7c10b26e4bd23d68d36af004a185f68" exitCode=0 Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.641468 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-tndns" event={"ID":"723c5d82-43eb-4833-b1d3-ad5236350fcc","Type":"ContainerDied","Data":"a49ae7d8311e8850aa71b8ec7459f2c2b7c10b26e4bd23d68d36af004a185f68"} Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.649285 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-g66fj" event={"ID":"c45cd232-7b33-4314-8060-842c5b6ccb6c","Type":"ContainerStarted","Data":"bfbea18e115f2616894f8c3bf0c4c49aaf7251dabb9b007a3b489cfd6aedeff3"} Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.651932 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-gx9t9" podStartSLOduration=122.651890232 podStartE2EDuration="2m2.651890232s" podCreationTimestamp="2026-01-26 16:55:04 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:57:06.642035029 +0000 UTC m=+154.225920626" watchObservedRunningTime="2026-01-26 16:57:06.651890232 +0000 UTC m=+154.235775829" Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.652658 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-b2bdh" event={"ID":"c9a81e77-6ca5-431f-96a8-2de21b7a214c","Type":"ContainerStarted","Data":"5d2bb1c25d6f48df9498fe4ad58815f3e21bb16e8a4438bcf575821a6a0b1b1c"} Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.652707 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-b2bdh" event={"ID":"c9a81e77-6ca5-431f-96a8-2de21b7a214c","Type":"ContainerStarted","Data":"d8e5d4af315cc5e8f70842b549d2310e93e5787d4d6389087661300afab9b2f5"} Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.656195 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-4bktv" event={"ID":"4bd100a4-d04b-44f7-83fb-3a94f0fe90ae","Type":"ContainerStarted","Data":"101943453bb0b90efc7af89b089be6c9c4a5339fb92a299094f2d33ec3eb4c40"} Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.662447 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-c26dz" podStartSLOduration=5.662428494 podStartE2EDuration="5.662428494s" podCreationTimestamp="2026-01-26 16:57:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:57:06.65881081 +0000 UTC m=+154.242696397" watchObservedRunningTime="2026-01-26 16:57:06.662428494 +0000 UTC m=+154.246314081" Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.670950 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-fl8hv" event={"ID":"e225c1fe-ceb6-47c2-9721-cfaf39515364","Type":"ContainerStarted","Data":"383a27a2a02df64062e4996434935be409b127caa2b8f1f721e081b21154ef2f"} Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.671435 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-fl8hv" Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.676159 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" event={"ID":"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043","Type":"ContainerStarted","Data":"5130cdd912347bde8e676845c6c867951a12ea8f74795f801b8429ffbb07e3c8"} Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.680205 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.680271 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-gx9t9" Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.722930 4865 patch_prober.go:28] interesting pod/router-default-5444994796-gx9t9 container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body= Jan 26 
16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.723387 4865 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gx9t9" podUID="a5eb1e8d-ffa8-4422-aa5d-852074436139" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused"
Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.723079 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.723913 4865 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-npnhh container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.11:6443/healthz\": dial tcp 10.217.0.11:6443: connect: connection refused" start-of-body=
Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.723948 4865 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" podUID="5d8bdd7f-0bfa-41ae-b684-7d96b15b7043" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.11:6443/healthz\": dial tcp 10.217.0.11:6443: connect: connection refused"
Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.723918 4865 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-6xswb container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.20:8443/healthz\": dial tcp 10.217.0.20:8443: connect: connection refused" start-of-body=
Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.724025 4865 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6xswb" podUID="8731191c-a1c3-4422-bcac-e6e6cfec649f" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.20:8443/healthz\": dial tcp 10.217.0.20:8443: connect: connection refused"
Jan 26 16:57:06 crc kubenswrapper[4865]: E0126 16:57:06.730735 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:07.230679572 +0000 UTC m=+154.814565169 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.730980 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.735907 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6xswb" podStartSLOduration=122.735882391 podStartE2EDuration="2m2.735882391s" podCreationTimestamp="2026-01-26 16:55:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:57:06.686095753 +0000 UTC m=+154.269981340" watchObservedRunningTime="2026-01-26 16:57:06.735882391 +0000 UTC m=+154.319767978"
Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.768541 4865 patch_prober.go:28] interesting pod/console-operator-58897d9998-fl8hv container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.22:8443/readyz\": dial tcp 10.217.0.22:8443: connect: connection refused" start-of-body=
Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.768614 4865 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-fl8hv" podUID="e225c1fe-ceb6-47c2-9721-cfaf39515364" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.22:8443/readyz\": dial tcp 10.217.0.22:8443: connect: connection refused"
Jan 26 16:57:06 crc kubenswrapper[4865]: E0126 16:57:06.774361 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:07.274340535 +0000 UTC m=+154.858226122 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.786354 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-68xvn" podStartSLOduration=122.786331139 podStartE2EDuration="2m2.786331139s" podCreationTimestamp="2026-01-26 16:55:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:57:06.781665725 +0000 UTC m=+154.365551382" watchObservedRunningTime="2026-01-26 16:57:06.786331139 +0000 UTC m=+154.370216726"
Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.802447 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-nknkn"
Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.848021 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:06 crc kubenswrapper[4865]: E0126 16:57:06.851044 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:07.351013625 +0000 UTC m=+154.934899212 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:06 crc kubenswrapper[4865]: E0126 16:57:06.872039 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:07.372020197 +0000 UTC m=+154.955905784 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
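----- editor's note -----
Every MountVolume/UnmountVolume failure in this excerpt has the same root cause: at this point in the boot the kubelet has no registered CSI plugin named kubevirt.io.hostpath-provisioner, so both Unmounter.TearDownAt and attacher.MountDevice fail before any CSI RPC is attempted. The driver's node plugin (hostpath-provisioner/csi-hostpathplugin-wx8d6) only starts near the end of this window. A minimal sketch of the lookup that produces this error text, with illustrative names rather than the actual kubelet source:

package main

import (
	"fmt"
	"sync"
)

// driverRegistry stands in for the kubelet's in-memory map of CSI plugins
// that have completed registration over the plugin-registration socket.
type driverRegistry struct {
	mu      sync.RWMutex
	drivers map[string]string // driver name -> unix socket endpoint
}

// clientFor fails fast when the named driver has not registered yet; this is
// the condition behind "driver name ... not found in the list of registered
// CSI drivers" in the entries above.
func (r *driverRegistry) clientFor(name string) (string, error) {
	r.mu.RLock()
	defer r.mu.RUnlock()
	endpoint, ok := r.drivers[name]
	if !ok {
		return "", fmt.Errorf("driver name %s not found in the list of registered CSI drivers", name)
	}
	return endpoint, nil
}

func main() {
	reg := &driverRegistry{drivers: map[string]string{}} // nothing registered yet
	if _, err := reg.clientFor("kubevirt.io.hostpath-provisioner"); err != nil {
		fmt.Println("MountDevice would fail:", err)
	}
}
----- end note -----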
Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.879292 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.893942 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-b2bdh" podStartSLOduration=122.893915245 podStartE2EDuration="2m2.893915245s" podCreationTimestamp="2026-01-26 16:55:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:57:06.864265395 +0000 UTC m=+154.448150992" watchObservedRunningTime="2026-01-26 16:57:06.893915245 +0000 UTC m=+154.477800852"
Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.914114 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" podStartSLOduration=123.914090424 podStartE2EDuration="2m3.914090424s" podCreationTimestamp="2026-01-26 16:55:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:57:06.913110746 +0000 UTC m=+154.496996333" watchObservedRunningTime="2026-01-26 16:57:06.914090424 +0000 UTC m=+154.497976011"
Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.918259 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-l6hpd"]
Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.949520 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4b7rk"]
Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.954119 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-zbkcg"]
Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.982612 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-z8s8l"]
Jan 26 16:57:06 crc kubenswrapper[4865]: I0126 16:57:06.983228 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:06 crc kubenswrapper[4865]: E0126 16:57:06.983796 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:07.483619899 +0000 UTC m=+155.067505486 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:07 crc kubenswrapper[4865]: W0126 16:57:07.020404 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod55e71027_c397_4e49_adef_6f8d1c7d78d2.slice/crio-f7fd65c247af0b3ee010369d54c108136ec4e837c97f781a3cbb4477cac2b68c WatchSource:0}: Error finding container f7fd65c247af0b3ee010369d54c108136ec4e837c97f781a3cbb4477cac2b68c: Status 404 returned error can't find the container with id f7fd65c247af0b3ee010369d54c108136ec4e837c97f781a3cbb4477cac2b68c
Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.034929 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j2f55"]
Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.053385 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mxn5w"]
Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.080528 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-7kt64"]
Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.082137 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-7pntj"]
Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.086812 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.086941 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-fl8hv" podStartSLOduration=124.086924682 podStartE2EDuration="2m4.086924682s" podCreationTimestamp="2026-01-26 16:55:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:57:07.05058837 +0000 UTC m=+154.634473977" watchObservedRunningTime="2026-01-26 16:57:07.086924682 +0000 UTC m=+154.670810269"
Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.087686 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qd267"]
Jan 26 16:57:07 crc kubenswrapper[4865]: E0126 16:57:07.087271 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:07.587254361 +0000 UTC m=+155.171139948 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:07 crc kubenswrapper[4865]: W0126 16:57:07.131215 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2918b19b_b96c_4b24_988d_f486d9cee307.slice/crio-f1b5438d9aee62c613f9449249f710a4fb5d9eb1d4911921c6fb327429877f35 WatchSource:0}: Error finding container f1b5438d9aee62c613f9449249f710a4fb5d9eb1d4911921c6fb327429877f35: Status 404 returned error can't find the container with id f1b5438d9aee62c613f9449249f710a4fb5d9eb1d4911921c6fb327429877f35
Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.187392 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:07 crc kubenswrapper[4865]: E0126 16:57:07.188616 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:07.688593609 +0000 UTC m=+155.272479186 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
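----- editor's note -----
The "No retries permitted until <time> (durationBeforeRetry 500ms)" suffix on each failure comes from the kubelet's nested pending operations tracker: after a volume operation fails, the same operation key is blocked until its backoff deadline passes, which is why the identical error pair repeats roughly every half second rather than spinning in a tight loop. A rough sketch of that gating under assumed names (the real nestedpendingoperations code also grows the backoff exponentially on repeated failures):

package main

import (
	"fmt"
	"time"
)

// retryGate remembers, per operation key, the earliest time a retry may start.
type retryGate struct {
	notBefore map[string]time.Time
}

// fail records a failure and schedules the next permitted attempt.
func (g *retryGate) fail(key string, backoff time.Duration, now time.Time) {
	g.notBefore[key] = now.Add(backoff)
}

// mayRun reports whether the reconciler is allowed to start the operation again.
func (g *retryGate) mayRun(key string, now time.Time) bool {
	return !now.Before(g.notBefore[key])
}

func main() {
	g := &retryGate{notBefore: map[string]time.Time{}}
	key := "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8"
	now := time.Now()
	g.fail(key, 500*time.Millisecond, now)
	fmt.Println(g.mayRun(key, now))                           // false: "No retries permitted until ..."
	fmt.Println(g.mayRun(key, now.Add(600*time.Millisecond))) // true: the reconciler tries again
}
----- end note -----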
Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.280519 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-4bktv" podStartSLOduration=123.280495956 podStartE2EDuration="2m3.280495956s" podCreationTimestamp="2026-01-26 16:55:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:57:07.122654037 +0000 UTC m=+154.706539634" watchObservedRunningTime="2026-01-26 16:57:07.280495956 +0000 UTC m=+154.864381543"
Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.283203 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-hn6rt"]
Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.293011 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:07 crc kubenswrapper[4865]: E0126 16:57:07.293400 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:07.793386745 +0000 UTC m=+155.377272322 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.310741 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-5mz4s"]
Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.310806 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kw5g8"]
Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.330340 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-shl5n"]
Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.397223 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:07 crc kubenswrapper[4865]: E0126 16:57:07.397766 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:07.89774358 +0000 UTC m=+155.481629167 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:07 crc kubenswrapper[4865]: W0126 16:57:07.398267 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod44932794_d83d_4520_ab4c_21e5a7652506.slice/crio-94d37428d7f060a1d73e3ed81eab9cae285560df55ab143d4fbf6417fa34f0f6 WatchSource:0}: Error finding container 94d37428d7f060a1d73e3ed81eab9cae285560df55ab143d4fbf6417fa34f0f6: Status 404 returned error can't find the container with id 94d37428d7f060a1d73e3ed81eab9cae285560df55ab143d4fbf6417fa34f0f6
Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.459070 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-dsngx"]
Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.531718 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kvvch"]
Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.532809 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:07 crc kubenswrapper[4865]: E0126 16:57:07.533315 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:08.033297079 +0000 UTC m=+155.617182666 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:07 crc kubenswrapper[4865]: W0126 16:57:07.546629 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod477ea589_1909_4668_ab2a_585589a4c8b6.slice/crio-0578c48fa70876c73a370b02ffddba9a407aa049df09a3a876a5fc65ee495231 WatchSource:0}: Error finding container 0578c48fa70876c73a370b02ffddba9a407aa049df09a3a876a5fc65ee495231: Status 404 returned error can't find the container with id 0578c48fa70876c73a370b02ffddba9a407aa049df09a3a876a5fc65ee495231
Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.595938 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29490765-lx6b6"]
Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.608100 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-6lhmn"]
Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.620948 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-gtdlb"]
Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.633861 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-phb7b"]
Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.634540 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:07 crc kubenswrapper[4865]: E0126 16:57:07.635165 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:08.135140471 +0000 UTC m=+155.719026058 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.685327 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mxn5w" event={"ID":"8ae57544-4ad2-46e2-b205-3a770abfc47f","Type":"ContainerStarted","Data":"c194f44bf88f10391b59d83953f6b8501ce0a6ce2fc5052ec32ce16e59925230"}
Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.686536 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-shl5n" event={"ID":"5418254d-28ed-4c8e-b0d5-70ac0fe883ee","Type":"ContainerStarted","Data":"f747e57e2ae3e033819250957b5fae442b3e292eb20b7190a95eb6c8dce22a37"}
Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.688494 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-l6hpd" event={"ID":"092200be-8013-4057-b586-d321c27dc3fb","Type":"ContainerStarted","Data":"1cc1598a9a4868e1002e21211b3c73535052e8fc436a726c98c6570301501f35"}
Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.694075 4865 patch_prober.go:28] interesting pod/router-default-5444994796-gx9t9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 26 16:57:07 crc kubenswrapper[4865]: [-]has-synced failed: reason withheld
Jan 26 16:57:07 crc kubenswrapper[4865]: [+]process-running ok
Jan 26 16:57:07 crc kubenswrapper[4865]: healthz check failed
Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.694122 4865 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gx9t9" podUID="a5eb1e8d-ffa8-4422-aa5d-852074436139" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.706121 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mq42v" event={"ID":"d63ddf25-4c00-4aec-b51a-5c80fe2a5810","Type":"ContainerStarted","Data":"4fb6786e82e11754f8320759d4d99ed47a9138da0b6b7e04c84de7257cdd7870"}
Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.708737 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mjd22" event={"ID":"dcb838a3-bd74-4860-a1e7-c9bacf4334ca","Type":"ContainerStarted","Data":"9727f3a42735be9d2bbe6a3c26ea6eceb1d073445e47d936596e6a1ca77eba3f"}
Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.710966 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-g66fj" event={"ID":"c45cd232-7b33-4314-8060-842c5b6ccb6c","Type":"ContainerStarted","Data":"e7140f970f6caaae9efbd5689be00d30ae5db50c5c572d9e80bf40a6ff1f2d45"}
Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.711955 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-g66fj"
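----- editor's note -----
The router startup probe above fails with HTTP 500 and a body in the standard Kubernetes healthz report format: one [+]/[-] line per named check, then a closing verdict. Here backend-http and has-synced fail while process-running passes, so the endpoint returns 500 and the kubelet records the start of the body. A hedged sketch of a handler emitting this format (illustrative only, not the router's actual implementation):

package main

import (
	"fmt"
	"net/http"
)

type check struct {
	name string
	run  func() error
}

// healthzHandler aggregates named checks into the [+]/[-] report seen in the
// probe output above and returns 500 when any check fails.
func healthzHandler(checks []check) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		body, failed := "", false
		for _, c := range checks {
			if err := c.run(); err != nil {
				failed = true
				body += fmt.Sprintf("[-]%s failed: reason withheld\n", c.name)
			} else {
				body += fmt.Sprintf("[+]%s ok\n", c.name)
			}
		}
		if failed {
			w.WriteHeader(http.StatusInternalServerError)
			body += "healthz check failed\n"
		}
		fmt.Fprint(w, body)
	}
}

func main() {
	http.Handle("/healthz/ready", healthzHandler([]check{
		{"backend-http", func() error { return fmt.Errorf("not ready") }},
		{"has-synced", func() error { return fmt.Errorf("not ready") }},
		{"process-running", func() error { return nil }},
	}))
	_ = http.ListenAndServe("localhost:1936", nil) // the router's probe port in this log
}
----- end note -----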
pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-g66fj" Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.726402 4865 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-g66fj container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.36:8443/healthz\": dial tcp 10.217.0.36:8443: connect: connection refused" start-of-body= Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.726456 4865 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-g66fj" podUID="c45cd232-7b33-4314-8060-842c5b6ccb6c" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.36:8443/healthz\": dial tcp 10.217.0.36:8443: connect: connection refused" Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.734889 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-hn6rt" event={"ID":"477ea589-1909-4668-ab2a-585589a4c8b6","Type":"ContainerStarted","Data":"0578c48fa70876c73a370b02ffddba9a407aa049df09a3a876a5fc65ee495231"} Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.736229 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:07 crc kubenswrapper[4865]: E0126 16:57:07.736568 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:08.23655484 +0000 UTC m=+155.820440427 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.744135 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-gzx5p"] Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.752548 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mdgf6"] Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.752593 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-vsrcj"] Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.770048 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-wx8d6"] Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.775690 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-mq42v" podStartSLOduration=124.775661732 podStartE2EDuration="2m4.775661732s" podCreationTimestamp="2026-01-26 16:55:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:57:07.73829148 +0000 UTC m=+155.322177067" watchObservedRunningTime="2026-01-26 16:57:07.775661732 +0000 UTC m=+155.359547319" Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.776310 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qd267" event={"ID":"7cd346f7-8914-4089-8e15-89085f8340d2","Type":"ContainerStarted","Data":"42ae6799a2112520cd5233976f78bd137ed05aa62bc35eeeac7a829ecd817204"} Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.783281 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-g66fj" podStartSLOduration=123.78325556 podStartE2EDuration="2m3.78325556s" podCreationTimestamp="2026-01-26 16:55:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:57:07.768573779 +0000 UTC m=+155.352459366" watchObservedRunningTime="2026-01-26 16:57:07.78325556 +0000 UTC m=+155.367141137" Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.789891 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-mfq2t" event={"ID":"d3d19ca3-d0e7-4ef8-9112-c66a479404d2","Type":"ContainerStarted","Data":"d224858f4b55a8d9a1411e517b479fec66644a483c3ec312098c99e1208c0c6b"} Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.802435 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-z8s8l" event={"ID":"55e71027-c397-4e49-adef-6f8d1c7d78d2","Type":"ContainerStarted","Data":"f7fd65c247af0b3ee010369d54c108136ec4e837c97f781a3cbb4477cac2b68c"} Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.827626 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-service-ca-operator/service-ca-operator-777779d784-zbkcg" event={"ID":"96e731f1-c61c-4bb3-9a87-d0d2a85ec1e4","Type":"ContainerStarted","Data":"cd8e3e635bf6905e859ffce94b3f31f97c81a9fbcfc492ef38d9ec2794bcaf7b"} Jan 26 16:57:07 crc kubenswrapper[4865]: W0126 16:57:07.828784 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podce5bf85d_dd65_4174_8fdc_10988986d665.slice/crio-cc5d1e460c5cdff15cc7001bb603dc4a93626e259a4c356270bfa5786f57c2e5 WatchSource:0}: Error finding container cc5d1e460c5cdff15cc7001bb603dc4a93626e259a4c356270bfa5786f57c2e5: Status 404 returned error can't find the container with id cc5d1e460c5cdff15cc7001bb603dc4a93626e259a4c356270bfa5786f57c2e5 Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.838375 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:57:07 crc kubenswrapper[4865]: E0126 16:57:07.839978 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:08.339951897 +0000 UTC m=+155.923837484 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.863551 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7kt64" event={"ID":"82bc8cef-2bec-4d01-a1a1-fd9c19c830a2","Type":"ContainerStarted","Data":"ccdaf0f68df1d4464298cb7a2c54127226242428e24eec2ef991220699f5bc6d"} Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.896227 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vzw8s" event={"ID":"b7cf8ff2-589a-4f1a-aa4a-0afe351bdd3b","Type":"ContainerStarted","Data":"410bd3f09fb0904249b0f9a2102678678afa9b79f084e429c90b2554d820ea5c"} Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.900303 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kw5g8" event={"ID":"44932794-d83d-4520-ab4c-21e5a7652506","Type":"ContainerStarted","Data":"94d37428d7f060a1d73e3ed81eab9cae285560df55ab143d4fbf6417fa34f0f6"} Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.903816 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-tndns" event={"ID":"723c5d82-43eb-4833-b1d3-ad5236350fcc","Type":"ContainerStarted","Data":"702eea25fbb313154ff0522bdedd37cc2f9a4831ee4d7c6bfae397a3559dfb11"} Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.910243 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-config-operator/openshift-config-operator-7777fb866f-tndns" Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.911601 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-5mz4s" event={"ID":"0e850ae3-1746-4f97-9314-e6add8fb4fed","Type":"ContainerStarted","Data":"f3a9569293a6163c13d5318c4b921eb6a734d772ad87c9585da435f66ea0d9cc"} Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.915634 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j2f55" event={"ID":"2918b19b-b96c-4b24-988d-f486d9cee307","Type":"ContainerStarted","Data":"f1b5438d9aee62c613f9449249f710a4fb5d9eb1d4911921c6fb327429877f35"} Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.920561 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-7pntj" event={"ID":"9156ce7a-0206-4509-b090-d2e93c83f425","Type":"ContainerStarted","Data":"2faf4ead6c95c1bb5940c28741d9671100592139f9f67491e5fb897c63464641"} Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.921757 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-tndns" podStartSLOduration=124.921742503 podStartE2EDuration="2m4.921742503s" podCreationTimestamp="2026-01-26 16:55:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:57:07.921167897 +0000 UTC m=+155.505053504" watchObservedRunningTime="2026-01-26 16:57:07.921742503 +0000 UTC m=+155.505628090" Jan 26 16:57:07 crc kubenswrapper[4865]: I0126 16:57:07.939926 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:07 crc kubenswrapper[4865]: E0126 16:57:07.940353 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:08.440341067 +0000 UTC m=+156.024226654 (durationBeforeRetry 500ms). 
Jan 26 16:57:08 crc kubenswrapper[4865]: I0126 16:57:08.000838 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6xswb"
Jan 26 16:57:08 crc kubenswrapper[4865]: I0126 16:57:08.041221 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:08 crc kubenswrapper[4865]: E0126 16:57:08.043099 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:08.543077875 +0000 UTC m=+156.126963462 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:08 crc kubenswrapper[4865]: I0126 16:57:08.143604 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:08 crc kubenswrapper[4865]: E0126 16:57:08.144166 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:08.644143104 +0000 UTC m=+156.228028731 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:08 crc kubenswrapper[4865]: I0126 16:57:08.246392 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:08 crc kubenswrapper[4865]: E0126 16:57:08.246754 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:08.746736188 +0000 UTC m=+156.330621775 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:08 crc kubenswrapper[4865]: I0126 16:57:08.357162 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:08 crc kubenswrapper[4865]: E0126 16:57:08.357573 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:08.857558557 +0000 UTC m=+156.441444144 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:08 crc kubenswrapper[4865]: I0126 16:57:08.458934 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:08 crc kubenswrapper[4865]: E0126 16:57:08.459350 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:08.959315937 +0000 UTC m=+156.543201534 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:08 crc kubenswrapper[4865]: I0126 16:57:08.459713 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:08 crc kubenswrapper[4865]: E0126 16:57:08.460164 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:08.96014753 +0000 UTC m=+156.544033137 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:08 crc kubenswrapper[4865]: I0126 16:57:08.547631 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-fl8hv"
Jan 26 16:57:08 crc kubenswrapper[4865]: I0126 16:57:08.560420 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:08 crc kubenswrapper[4865]: E0126 16:57:08.560903 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:09.060882721 +0000 UTC m=+156.644768308 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:08 crc kubenswrapper[4865]: I0126 16:57:08.666685 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:08 crc kubenswrapper[4865]: E0126 16:57:08.667120 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:09.167106258 +0000 UTC m=+156.750991855 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:08 crc kubenswrapper[4865]: I0126 16:57:08.680305 4865 patch_prober.go:28] interesting pod/router-default-5444994796-gx9t9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 26 16:57:08 crc kubenswrapper[4865]: [-]has-synced failed: reason withheld
Jan 26 16:57:08 crc kubenswrapper[4865]: [+]process-running ok
Jan 26 16:57:08 crc kubenswrapper[4865]: healthz check failed
Jan 26 16:57:08 crc kubenswrapper[4865]: I0126 16:57:08.680390 4865 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gx9t9" podUID="a5eb1e8d-ffa8-4422-aa5d-852074436139" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 26 16:57:08 crc kubenswrapper[4865]: I0126 16:57:08.768068 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:08 crc kubenswrapper[4865]: E0126 16:57:08.768387 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:09.268364793 +0000 UTC m=+156.852250380 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:08 crc kubenswrapper[4865]: I0126 16:57:08.877865 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:08 crc kubenswrapper[4865]: E0126 16:57:08.878752 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:09.37873693 +0000 UTC m=+156.962622517 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:08 crc kubenswrapper[4865]: I0126 16:57:08.932100 4865 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-npnhh container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.11:6443/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body=
Jan 26 16:57:08 crc kubenswrapper[4865]: I0126 16:57:08.932179 4865 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" podUID="5d8bdd7f-0bfa-41ae-b684-7d96b15b7043" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.11:6443/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:08.982767 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:09 crc kubenswrapper[4865]: E0126 16:57:08.983094 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:09.483075804 +0000 UTC m=+157.066961391 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
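----- editor's note -----
Three distinct probe-failure signatures appear in this window, and they point at different states: "connect: connection refused" (nothing listening on the port yet), "HTTP probe failed with statuscode: 500" (server up but its dependencies are unhealthy), and "context deadline exceeded (Client.Timeout exceeded while awaiting headers)" (connection accepted but no response within the probe timeout, as with oauth-openshift just above). A small sketch of how an HTTP prober distinguishes them, assuming a 1s timeout:

package main

import (
	"fmt"
	"net/http"
	"time"
)

// probe runs one HTTP check and classifies the outcome into the failure
// modes visible in this log excerpt.
func probe(url string) string {
	client := &http.Client{Timeout: 1 * time.Second} // probe timeoutSeconds, assumed
	resp, err := client.Get(url)
	if err != nil {
		// Covers both "connect: connection refused" and
		// "context deadline exceeded (Client.Timeout exceeded while awaiting headers)".
		return fmt.Sprintf("failure: %v", err)
	}
	defer resp.Body.Close()
	if resp.StatusCode >= 400 {
		return fmt.Sprintf("failure: HTTP probe failed with statuscode: %d", resp.StatusCode)
	}
	return "success"
}

func main() {
	fmt.Println(probe("http://localhost:1936/healthz/ready")) // router probe endpoint from this log
}
----- end note -----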
Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:09.031683 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-zbkcg" event={"ID":"96e731f1-c61c-4bb3-9a87-d0d2a85ec1e4","Type":"ContainerStarted","Data":"2d7f3053650e88d50eba9cfba716103f8be40cd340b6aae6cc01c0b741801d0f"}
Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:09.034481 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-gtdlb" event={"ID":"40010721-d20f-43ea-966f-5a4a41d60a20","Type":"ContainerStarted","Data":"9da2bbe9d02c2ada7011af5d387397a84c6ab45bdb30d0181b673c69401d618d"}
Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:09.036319 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qd267" event={"ID":"7cd346f7-8914-4089-8e15-89085f8340d2","Type":"ContainerStarted","Data":"c80b56ac81d1c32c97af94e53baa54be00e489d394d13216cd73cf6ed7f376fa"}
Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:09.076859 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-6lhmn" event={"ID":"d6c003d6-66db-4a16-86a2-ba0d03cf1a25","Type":"ContainerStarted","Data":"e5b4250d84743975ee47e01f4588be2bd9133020626c2587073a788bdf265505"}
Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:09.084028 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:09 crc kubenswrapper[4865]: E0126 16:57:09.084458 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:09.584438142 +0000 UTC m=+157.168323729 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:09.176900 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-gzx5p" event={"ID":"aafa6785-2823-4a2c-919d-b0276056337c","Type":"ContainerStarted","Data":"c5045b71f5f9ac7a6ffca652322bcdbff5d1adbb09e87a6f6799b8ea0b5d90ed"}
Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:09.184688 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:09 crc kubenswrapper[4865]: E0126 16:57:09.185136 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:09.68511179 +0000 UTC m=+157.268997377 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:09.203782 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-hn6rt" event={"ID":"477ea589-1909-4668-ab2a-585589a4c8b6","Type":"ContainerStarted","Data":"4f5335e981bb0d1e6bb2750fda98e9cc76995cfe0e983486b8fd31ad2e61ccfc"}
Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:09.289114 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-zbkcg" podStartSLOduration=125.289091873 podStartE2EDuration="2m5.289091873s" podCreationTimestamp="2026-01-26 16:55:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:57:09.22030412 +0000 UTC m=+156.804189707" watchObservedRunningTime="2026-01-26 16:57:09.289091873 +0000 UTC m=+156.872977460"
Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:09.289720 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-hn6rt" podStartSLOduration=8.289714691 podStartE2EDuration="8.289714691s" podCreationTimestamp="2026-01-26 16:57:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:57:09.289043552 +0000 UTC m=+156.872929149" watchObservedRunningTime="2026-01-26 16:57:09.289714691 +0000 UTC m=+156.873600278"
Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:09.289880 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:09 crc kubenswrapper[4865]: E0126 16:57:09.290276 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:09.790261017 +0000 UTC m=+157.374146604 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:09.331836 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qd267" podStartSLOduration=125.331810379 podStartE2EDuration="2m5.331810379s" podCreationTimestamp="2026-01-26 16:55:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:57:09.327927668 +0000 UTC m=+156.911813265" watchObservedRunningTime="2026-01-26 16:57:09.331810379 +0000 UTC m=+156.915695966"
Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:09.394743 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:09 crc kubenswrapper[4865]: E0126 16:57:09.396273 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:09.896251468 +0000 UTC m=+157.480137055 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:09.421733 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mjd22" event={"ID":"dcb838a3-bd74-4860-a1e7-c9bacf4334ca","Type":"ContainerStarted","Data":"4e92e3c48fd65d61aa00ee37d9e06d58a5835099aaddc091a45300c0ddcf3f76"} Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:09.497279 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-mjd22" podStartSLOduration=125.497253836 podStartE2EDuration="2m5.497253836s" podCreationTimestamp="2026-01-26 16:55:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:57:09.494850997 +0000 UTC m=+157.078736584" watchObservedRunningTime="2026-01-26 16:57:09.497253836 +0000 UTC m=+157.081139423" Jan 26 16:57:09 crc kubenswrapper[4865]: E0126 16:57:09.497670 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:09.997655247 +0000 UTC m=+157.581540834 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:09.497317 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:09.511970 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29490765-lx6b6" event={"ID":"3862707b-bcae-4ad0-b309-fa075dc82f70","Type":"ContainerStarted","Data":"a4e37ab8c7c06f32668fa357edcf672abfbea0b3c1190ac9c04afb7559243d0e"} Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:09.525331 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-wx8d6" event={"ID":"e8e3ebba-3ac5-4cee-8fa8-273a1f25eaf2","Type":"ContainerStarted","Data":"42bb345ef93a84e159f9a46117cea21d327fdacebbb9307097aefabd714be68e"} Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:09.539131 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j2f55" 
event={"ID":"2918b19b-b96c-4b24-988d-f486d9cee307","Type":"ContainerStarted","Data":"19c934d432bb308ea52a3f953d6ae6a33e7e3334666c4f464a230b24f360eb7e"} Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:09.609474 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:57:09 crc kubenswrapper[4865]: E0126 16:57:09.610260 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:10.110243687 +0000 UTC m=+157.694129274 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:09.680494 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-shl5n" event={"ID":"5418254d-28ed-4c8e-b0d5-70ac0fe883ee","Type":"ContainerStarted","Data":"ed380e5b3850ace33117f13ce940ba6a9e02a7657de4b33c3277c97458214002"} Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:09.720063 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-l6hpd" event={"ID":"092200be-8013-4057-b586-d321c27dc3fb","Type":"ContainerStarted","Data":"e26bb5851fb06db23c38a99c3ff0c30616f2f545588a3bb79ba25f96f81572c7"} Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:09.721629 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-l6hpd" Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:09.723402 4865 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-l6hpd container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.21:5443/healthz\": dial tcp 10.217.0.21:5443: connect: connection refused" start-of-body= Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:09.723457 4865 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-l6hpd" podUID="092200be-8013-4057-b586-d321c27dc3fb" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.21:5443/healthz\": dial tcp 10.217.0.21:5443: connect: connection refused" Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:09.725178 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4b7rk" event={"ID":"9c1dc714-6c88-4db6-ac20-54b9771956a3","Type":"ContainerStarted","Data":"202974a0a4450b388cce7c9b835a6c31d8a08f70fd30dd7a950f22f2c56aeb21"} Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:09.728012 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kvvch" event={"ID":"0af9b5a1-9cca-41dc-9a23-823a440db8c1","Type":"ContainerStarted","Data":"a2d68cafe2c53a398d1635763baec691058893fecc190bd1bef438d24b9e0e3e"} Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:09.731339 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mdgf6" event={"ID":"9fc43392-6eaa-4e4c-bffb-42bdbc556d4e","Type":"ContainerStarted","Data":"e81be678ceb7d9d43bc553133e59d5b72b33fd6fc0831579501434c7233290ce"} Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:09.734558 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-dsngx" event={"ID":"f8fac562-f66c-433b-9e4a-1a08fe6d78f5","Type":"ContainerStarted","Data":"ad9757fd3ce2e278b3e54f0a0d553f7ef6ba039725cb58726093d764ac9d814d"} Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:09.745341 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zkwx7" event={"ID":"c62c3a20-356b-487a-80b8-aaea5650712b","Type":"ContainerStarted","Data":"d14a5b68ffb5f67ae5c99632095e44a4fefda41c62f7dd1abb0b63e11f158208"} Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:09.751832 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-mfq2t" event={"ID":"d3d19ca3-d0e7-4ef8-9112-c66a479404d2","Type":"ContainerStarted","Data":"468f89e944a9d89c8dcb681c632a79df8d6931208e81c588b8d40f05a9b3dea8"} Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:09.764077 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:09 crc kubenswrapper[4865]: E0126 16:57:09.765866 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:10.265853982 +0000 UTC m=+157.849739569 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:09.880872 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:57:09 crc kubenswrapper[4865]: E0126 16:57:09.881298 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-01-26 16:57:10.381278564 +0000 UTC m=+157.965164151 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:09.899203 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-l6hpd" podStartSLOduration=125.899178077 podStartE2EDuration="2m5.899178077s" podCreationTimestamp="2026-01-26 16:55:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:57:09.899033053 +0000 UTC m=+157.482918640" watchObservedRunningTime="2026-01-26 16:57:09.899178077 +0000 UTC m=+157.483063664" Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:09.900067 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-7pntj" event={"ID":"9156ce7a-0206-4509-b090-d2e93c83f425","Type":"ContainerStarted","Data":"37aac76f00b505725306c6ccf8f49386ab3561937095c7d54c9e23e7fe81d8ab"} Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:09.901429 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-7pntj" Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:09.905247 4865 patch_prober.go:28] interesting pod/router-default-5444994796-gx9t9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 26 16:57:09 crc kubenswrapper[4865]: [-]has-synced failed: reason withheld Jan 26 16:57:09 crc kubenswrapper[4865]: [+]process-running ok Jan 26 16:57:09 crc kubenswrapper[4865]: healthz check failed Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:09.905304 4865 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gx9t9" podUID="a5eb1e8d-ffa8-4422-aa5d-852074436139" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:09.906719 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vzw8s" event={"ID":"b7cf8ff2-589a-4f1a-aa4a-0afe351bdd3b","Type":"ContainerStarted","Data":"bd18befd5bbec5aa6de63c2c7edddaeca2f1ecdc4feeca43526a1e614c888e7a"} Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:09.915553 4865 patch_prober.go:28] interesting pod/downloads-7954f5f757-7pntj container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:09.915971 4865 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7pntj" podUID="9156ce7a-0206-4509-b090-d2e93c83f425" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection 
refused" Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:09.947328 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zkwx7" podStartSLOduration=125.947307498 podStartE2EDuration="2m5.947307498s" podCreationTimestamp="2026-01-26 16:55:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:57:09.946800083 +0000 UTC m=+157.530685680" watchObservedRunningTime="2026-01-26 16:57:09.947307498 +0000 UTC m=+157.531193085" Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:09.963574 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-phb7b" event={"ID":"ce5bf85d-dd65-4174-8fdc-10988986d665","Type":"ContainerStarted","Data":"cc5d1e460c5cdff15cc7001bb603dc4a93626e259a4c356270bfa5786f57c2e5"} Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:09.964529 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-phb7b" Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:09.974662 4865 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-phb7b container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.38:8080/healthz\": dial tcp 10.217.0.38:8080: connect: connection refused" start-of-body= Jan 26 16:57:09 crc kubenswrapper[4865]: I0126 16:57:09.974736 4865 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-phb7b" podUID="ce5bf85d-dd65-4174-8fdc-10988986d665" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.38:8080/healthz\": dial tcp 10.217.0.38:8080: connect: connection refused" Jan 26 16:57:10 crc kubenswrapper[4865]: I0126 16:57:10.004047 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:10 crc kubenswrapper[4865]: E0126 16:57:10.018379 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:10.518358906 +0000 UTC m=+158.102244493 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:10 crc kubenswrapper[4865]: I0126 16:57:10.084293 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mxn5w" event={"ID":"8ae57544-4ad2-46e2-b205-3a770abfc47f","Type":"ContainerStarted","Data":"bd95286fb8a61193ad4c9cec1b2ddee842c525fbbcc68e0b4dbc9bcd260b89ad"} Jan 26 16:57:10 crc kubenswrapper[4865]: I0126 16:57:10.102653 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kw5g8" event={"ID":"44932794-d83d-4520-ab4c-21e5a7652506","Type":"ContainerStarted","Data":"6df5950446311722913d929897ceb7f13406a6485af79d7bc388a37b50290493"} Jan 26 16:57:10 crc kubenswrapper[4865]: I0126 16:57:10.112577 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kw5g8" Jan 26 16:57:10 crc kubenswrapper[4865]: I0126 16:57:10.114452 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:57:10 crc kubenswrapper[4865]: I0126 16:57:10.114654 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kvvch" podStartSLOduration=126.114629949 podStartE2EDuration="2m6.114629949s" podCreationTimestamp="2026-01-26 16:55:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:57:10.002326236 +0000 UTC m=+157.586211823" watchObservedRunningTime="2026-01-26 16:57:10.114629949 +0000 UTC m=+157.698515536" Jan 26 16:57:10 crc kubenswrapper[4865]: I0126 16:57:10.116650 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-phb7b" podStartSLOduration=126.116638136 podStartE2EDuration="2m6.116638136s" podCreationTimestamp="2026-01-26 16:55:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:57:10.113054503 +0000 UTC m=+157.696940120" watchObservedRunningTime="2026-01-26 16:57:10.116638136 +0000 UTC m=+157.700523723" Jan 26 16:57:10 crc kubenswrapper[4865]: E0126 16:57:10.116910 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:10.616868243 +0000 UTC m=+158.200753840 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:10 crc kubenswrapper[4865]: I0126 16:57:10.118066 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:10 crc kubenswrapper[4865]: E0126 16:57:10.119788 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:10.619768106 +0000 UTC m=+158.203653703 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:10 crc kubenswrapper[4865]: I0126 16:57:10.139763 4865 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-kw5g8 container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.39:8443/healthz\": dial tcp 10.217.0.39:8443: connect: connection refused" start-of-body= Jan 26 16:57:10 crc kubenswrapper[4865]: I0126 16:57:10.139831 4865 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kw5g8" podUID="44932794-d83d-4520-ab4c-21e5a7652506" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.39:8443/healthz\": dial tcp 10.217.0.39:8443: connect: connection refused" Jan 26 16:57:10 crc kubenswrapper[4865]: I0126 16:57:10.150226 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-z8s8l" event={"ID":"55e71027-c397-4e49-adef-6f8d1c7d78d2","Type":"ContainerStarted","Data":"750d2d8f119178dd73c8a2e30efe1bca50e3fbbf00569c0a370fae973e2a9d6a"} Jan 26 16:57:10 crc kubenswrapper[4865]: I0126 16:57:10.157220 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-vsrcj" event={"ID":"e3cf8ef1-8f1a-4688-8443-600830a81400","Type":"ContainerStarted","Data":"165061eeef15109f52a984489a8c110096b2f8c8127e2a39e15a055164e9ae42"} Jan 26 16:57:10 crc kubenswrapper[4865]: I0126 16:57:10.169043 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7kt64" event={"ID":"82bc8cef-2bec-4d01-a1a1-fd9c19c830a2","Type":"ContainerStarted","Data":"ff07f96d9ea0e99381a8ba631b54452f37f375a6a8189820ef50711ba37788da"} Jan 26 16:57:10 crc 
kubenswrapper[4865]: I0126 16:57:10.177337 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-7pntj" podStartSLOduration=126.177297687 podStartE2EDuration="2m6.177297687s" podCreationTimestamp="2026-01-26 16:55:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:57:10.173362214 +0000 UTC m=+157.757247821" watchObservedRunningTime="2026-01-26 16:57:10.177297687 +0000 UTC m=+157.761183274" Jan 26 16:57:10 crc kubenswrapper[4865]: I0126 16:57:10.211814 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-vzw8s" podStartSLOduration=126.211792196 podStartE2EDuration="2m6.211792196s" podCreationTimestamp="2026-01-26 16:55:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:57:10.211458797 +0000 UTC m=+157.795344384" watchObservedRunningTime="2026-01-26 16:57:10.211792196 +0000 UTC m=+157.795677783" Jan 26 16:57:10 crc kubenswrapper[4865]: I0126 16:57:10.212434 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-tndns" Jan 26 16:57:10 crc kubenswrapper[4865]: I0126 16:57:10.219721 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:57:10 crc kubenswrapper[4865]: E0126 16:57:10.221317 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:10.721298579 +0000 UTC m=+158.305184166 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:10 crc kubenswrapper[4865]: I0126 16:57:10.316913 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-mxn5w" podStartSLOduration=126.316883531 podStartE2EDuration="2m6.316883531s" podCreationTimestamp="2026-01-26 16:55:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:57:10.276926085 +0000 UTC m=+157.860811682" watchObservedRunningTime="2026-01-26 16:57:10.316883531 +0000 UTC m=+157.900769128" Jan 26 16:57:10 crc kubenswrapper[4865]: I0126 16:57:10.322430 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:10 crc kubenswrapper[4865]: E0126 16:57:10.322831 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:10.822803171 +0000 UTC m=+158.406688758 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:10 crc kubenswrapper[4865]: I0126 16:57:10.426048 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:57:10 crc kubenswrapper[4865]: I0126 16:57:10.426833 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:57:10 crc kubenswrapper[4865]: I0126 16:57:10.426896 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:57:10 crc kubenswrapper[4865]: I0126 16:57:10.426921 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:57:10 crc kubenswrapper[4865]: I0126 16:57:10.426958 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:57:10 crc kubenswrapper[4865]: E0126 16:57:10.427848 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:10.927816184 +0000 UTC m=+158.511701771 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:10 crc kubenswrapper[4865]: I0126 16:57:10.428550 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:57:10 crc kubenswrapper[4865]: I0126 16:57:10.450879 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:57:10 crc kubenswrapper[4865]: I0126 16:57:10.450978 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-g66fj" Jan 26 16:57:10 crc kubenswrapper[4865]: I0126 16:57:10.456312 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:57:10 crc kubenswrapper[4865]: I0126 16:57:10.458863 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:57:10 crc kubenswrapper[4865]: I0126 16:57:10.495735 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kw5g8" podStartSLOduration=126.495713922 podStartE2EDuration="2m6.495713922s" podCreationTimestamp="2026-01-26 16:55:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:57:10.318065365 +0000 UTC m=+157.901950972" watchObservedRunningTime="2026-01-26 16:57:10.495713922 +0000 UTC m=+158.079599509" Jan 26 16:57:10 crc kubenswrapper[4865]: I0126 16:57:10.496325 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-z8s8l" podStartSLOduration=126.496317389 podStartE2EDuration="2m6.496317389s" podCreationTimestamp="2026-01-26 16:55:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:57:10.494644091 +0000 UTC m=+158.078529688" watchObservedRunningTime="2026-01-26 
16:57:10.496317389 +0000 UTC m=+158.080202976" Jan 26 16:57:10 crc kubenswrapper[4865]: I0126 16:57:10.528047 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:10 crc kubenswrapper[4865]: E0126 16:57:10.528482 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:11.028464382 +0000 UTC m=+158.612349979 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:10 crc kubenswrapper[4865]: I0126 16:57:10.574656 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 16:57:10 crc kubenswrapper[4865]: I0126 16:57:10.590182 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 16:57:10 crc kubenswrapper[4865]: I0126 16:57:10.603339 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:57:10 crc kubenswrapper[4865]: I0126 16:57:10.628798 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:57:10 crc kubenswrapper[4865]: E0126 16:57:10.628977 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:11.128939813 +0000 UTC m=+158.712825400 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:10 crc kubenswrapper[4865]: I0126 16:57:10.629130 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:10 crc kubenswrapper[4865]: E0126 16:57:10.629755 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:11.129744936 +0000 UTC m=+158.713630533 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:10 crc kubenswrapper[4865]: I0126 16:57:10.690307 4865 patch_prober.go:28] interesting pod/router-default-5444994796-gx9t9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 26 16:57:10 crc kubenswrapper[4865]: [-]has-synced failed: reason withheld Jan 26 16:57:10 crc kubenswrapper[4865]: [+]process-running ok Jan 26 16:57:10 crc kubenswrapper[4865]: healthz check failed Jan 26 16:57:10 crc kubenswrapper[4865]: I0126 16:57:10.690373 4865 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gx9t9" podUID="a5eb1e8d-ffa8-4422-aa5d-852074436139" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 26 16:57:10 crc kubenswrapper[4865]: I0126 16:57:10.730694 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:57:10 crc kubenswrapper[4865]: E0126 16:57:10.731147 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:11.231123185 +0000 UTC m=+158.815008772 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:10 crc kubenswrapper[4865]: I0126 16:57:10.832778 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:10 crc kubenswrapper[4865]: E0126 16:57:10.833748 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:11.333731829 +0000 UTC m=+158.917617416 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:10 crc kubenswrapper[4865]: I0126 16:57:10.935403 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:57:10 crc kubenswrapper[4865]: E0126 16:57:10.935836 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:11.435814178 +0000 UTC m=+159.019699765 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:10 crc kubenswrapper[4865]: I0126 16:57:10.983854 4865 csr.go:261] certificate signing request csr-7hzmt is approved, waiting to be issued Jan 26 16:57:11 crc kubenswrapper[4865]: I0126 16:57:11.029530 4865 csr.go:257] certificate signing request csr-7hzmt is issued Jan 26 16:57:11 crc kubenswrapper[4865]: I0126 16:57:11.036666 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:11 crc kubenswrapper[4865]: E0126 16:57:11.037033 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:11.537017051 +0000 UTC m=+159.120902648 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:11 crc kubenswrapper[4865]: I0126 16:57:11.139559 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:57:11 crc kubenswrapper[4865]: E0126 16:57:11.139984 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:11.639960505 +0000 UTC m=+159.223846092 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:11 crc kubenswrapper[4865]: I0126 16:57:11.198964 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-kvvch" event={"ID":"0af9b5a1-9cca-41dc-9a23-823a440db8c1","Type":"ContainerStarted","Data":"7f55b71207b870e76f187d03c37fb05ccd3c2ddfa90a4645b5a2b81ec49fbc1f"} Jan 26 16:57:11 crc kubenswrapper[4865]: I0126 16:57:11.214538 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29490765-lx6b6" event={"ID":"3862707b-bcae-4ad0-b309-fa075dc82f70","Type":"ContainerStarted","Data":"9e4f3695228595d6e9784f1242760500f9692c79b1b471531b361b48a69482c9"} Jan 26 16:57:11 crc kubenswrapper[4865]: I0126 16:57:11.234356 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-shl5n" event={"ID":"5418254d-28ed-4c8e-b0d5-70ac0fe883ee","Type":"ContainerStarted","Data":"eb9af84c2d0aba7e8ba65863e48a11463e209ac0f8ef9e83ae33999d817a11fb"} Jan 26 16:57:11 crc kubenswrapper[4865]: I0126 16:57:11.240471 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:11 crc kubenswrapper[4865]: E0126 16:57:11.242711 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:11.742697532 +0000 UTC m=+159.326583119 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:11 crc kubenswrapper[4865]: I0126 16:57:11.252645 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-z8s8l" event={"ID":"55e71027-c397-4e49-adef-6f8d1c7d78d2","Type":"ContainerStarted","Data":"692149be2353d98abe412a5569289ab33ea338278dae66ac0db7e29aaae93933"} Jan 26 16:57:11 crc kubenswrapper[4865]: I0126 16:57:11.292741 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-gzx5p" event={"ID":"aafa6785-2823-4a2c-919d-b0276056337c","Type":"ContainerStarted","Data":"5f00dfae138d4666fb8e44e7876dac905f560921603902077032bd7a1a72b012"} Jan 26 16:57:11 crc kubenswrapper[4865]: I0126 16:57:11.343704 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:57:11 crc kubenswrapper[4865]: E0126 16:57:11.344475 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:11.844456142 +0000 UTC m=+159.428341729 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:11 crc kubenswrapper[4865]: I0126 16:57:11.345750 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-phb7b" event={"ID":"ce5bf85d-dd65-4174-8fdc-10988986d665","Type":"ContainerStarted","Data":"51ca7c97b44cf70fd6b579979b4622ce13160e317e74d5ad33e83a7b0bf15983"} Jan 26 16:57:11 crc kubenswrapper[4865]: I0126 16:57:11.345817 4865 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-phb7b container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.38:8080/healthz\": dial tcp 10.217.0.38:8080: connect: connection refused" start-of-body= Jan 26 16:57:11 crc kubenswrapper[4865]: I0126 16:57:11.345841 4865 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-phb7b" podUID="ce5bf85d-dd65-4174-8fdc-10988986d665" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.38:8080/healthz\": dial tcp 10.217.0.38:8080: connect: connection refused" Jan 26 16:57:11 crc kubenswrapper[4865]: I0126 16:57:11.379978 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-wx8d6" event={"ID":"e8e3ebba-3ac5-4cee-8fa8-273a1f25eaf2","Type":"ContainerStarted","Data":"414453055af873b296b9022b77be22ae79b117962e551b611fd01892a79197fe"} Jan 26 16:57:11 crc kubenswrapper[4865]: I0126 16:57:11.382779 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-gtdlb" event={"ID":"40010721-d20f-43ea-966f-5a4a41d60a20","Type":"ContainerStarted","Data":"ccb3b4cd7834d0d7c507ed8cea1033cc90aea99131c1694aac11ce1cb6627bba"} Jan 26 16:57:11 crc kubenswrapper[4865]: I0126 16:57:11.384261 4865 generic.go:334] "Generic (PLEG): container finished" podID="d6c003d6-66db-4a16-86a2-ba0d03cf1a25" containerID="0680faceca032c2f037d52557ed2830f80783f43552037717a06d2586e10ef45" exitCode=0 Jan 26 16:57:11 crc kubenswrapper[4865]: I0126 16:57:11.384311 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-6lhmn" event={"ID":"d6c003d6-66db-4a16-86a2-ba0d03cf1a25","Type":"ContainerDied","Data":"0680faceca032c2f037d52557ed2830f80783f43552037717a06d2586e10ef45"} Jan 26 16:57:11 crc kubenswrapper[4865]: I0126 16:57:11.425217 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29490765-lx6b6" podStartSLOduration=128.425200438 podStartE2EDuration="2m8.425200438s" podCreationTimestamp="2026-01-26 16:55:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:57:11.310182638 +0000 UTC m=+158.894068215" watchObservedRunningTime="2026-01-26 16:57:11.425200438 +0000 UTC m=+159.009086026" Jan 26 16:57:11 crc kubenswrapper[4865]: I0126 16:57:11.426264 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-gzx5p" 
podStartSLOduration=127.426238528 podStartE2EDuration="2m7.426238528s" podCreationTimestamp="2026-01-26 16:55:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:57:11.379682202 +0000 UTC m=+158.963567789" watchObservedRunningTime="2026-01-26 16:57:11.426238528 +0000 UTC m=+159.010124115"
Jan 26 16:57:11 crc kubenswrapper[4865]: I0126 16:57:11.433707 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-mfq2t" event={"ID":"d3d19ca3-d0e7-4ef8-9112-c66a479404d2","Type":"ContainerStarted","Data":"d5ccc8d95705d0ade2728074821c436c19800163cb7217cac64e69becda756fa"}
Jan 26 16:57:11 crc kubenswrapper[4865]: I0126 16:57:11.447353 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:11 crc kubenswrapper[4865]: E0126 16:57:11.449480 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:11.949467555 +0000 UTC m=+159.533353142 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:11 crc kubenswrapper[4865]: I0126 16:57:11.450812 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-vsrcj" event={"ID":"e3cf8ef1-8f1a-4688-8443-600830a81400","Type":"ContainerStarted","Data":"07c2756ee4a5d62d230e2416f7ae3e9e28268e49b57279a77748052df9e0e8de"}
Jan 26 16:57:11 crc kubenswrapper[4865]: I0126 16:57:11.490472 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7kt64" event={"ID":"82bc8cef-2bec-4d01-a1a1-fd9c19c830a2","Type":"ContainerStarted","Data":"e74b90359118e6847362cd7f830f87e211d8f605fe2bbb024f2bfc4deeca5612"}
Jan 26 16:57:11 crc kubenswrapper[4865]: I0126 16:57:11.495453 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-dsngx" event={"ID":"f8fac562-f66c-433b-9e4a-1a08fe6d78f5","Type":"ContainerStarted","Data":"f0cf2e24a587d14b3f9cb39afc91fd2ef9c46491d704dbf457c3e727b84876dd"}
Jan 26 16:57:11 crc kubenswrapper[4865]: I0126 16:57:11.498492 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j2f55" event={"ID":"2918b19b-b96c-4b24-988d-f486d9cee307","Type":"ContainerStarted","Data":"cce8cbdb5b6fdb23d88d87cf403ebadd2cb0fa4b248a762258c951501e311727"}
Jan 26 16:57:11 crc kubenswrapper[4865]: I0126 16:57:11.499013 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j2f55"
Jan 26 16:57:11 crc kubenswrapper[4865]: I0126 16:57:11.550249 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:11 crc kubenswrapper[4865]: E0126 16:57:11.550910 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:12.050891155 +0000 UTC m=+159.634776742 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:11 crc kubenswrapper[4865]: I0126 16:57:11.552024 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-5mz4s" event={"ID":"0e850ae3-1746-4f97-9314-e6add8fb4fed","Type":"ContainerStarted","Data":"f8104c8aef0fff774b86f0939829dd50f671033478e9c55f711362faba413cd0"}
Jan 26 16:57:11 crc kubenswrapper[4865]: I0126 16:57:11.552059 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-5mz4s" event={"ID":"0e850ae3-1746-4f97-9314-e6add8fb4fed","Type":"ContainerStarted","Data":"a5c65aecb53688f380a0e699e5435307bc27594a1c93b28811ceb0a0dbbf62e1"}
Jan 26 16:57:11 crc kubenswrapper[4865]: I0126 16:57:11.585589 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4b7rk" event={"ID":"9c1dc714-6c88-4db6-ac20-54b9771956a3","Type":"ContainerStarted","Data":"9e6fdbdf8b35605d406917257dd7f5d9a195c65c75901fa22aea643a8b2a4ed6"}
Jan 26 16:57:11 crc kubenswrapper[4865]: I0126 16:57:11.585659 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4b7rk" event={"ID":"9c1dc714-6c88-4db6-ac20-54b9771956a3","Type":"ContainerStarted","Data":"81864ca14d09a95db6c44a73ee232aaf61c9f90ee56a75d1198eb18f343f48b3"}
Jan 26 16:57:11 crc kubenswrapper[4865]: I0126 16:57:11.588593 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mdgf6" event={"ID":"9fc43392-6eaa-4e4c-bffb-42bdbc556d4e","Type":"ContainerStarted","Data":"e26091a0f8cbb2aa44e7190710fa7409bf314df5c2785397e2101105fff9ac5e"}
Jan 26 16:57:11 crc kubenswrapper[4865]: I0126 16:57:11.612601 4865 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-l6hpd container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.21:5443/healthz\": dial tcp 10.217.0.21:5443: connect: connection refused" start-of-body=
Jan 26 16:57:11 crc kubenswrapper[4865]: I0126 16:57:11.612647 4865 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-l6hpd" podUID="092200be-8013-4057-b586-d321c27dc3fb" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.21:5443/healthz\": dial tcp 10.217.0.21:5443: connect: connection refused"
Jan 26 16:57:11 crc kubenswrapper[4865]: I0126 16:57:11.616563 4865 patch_prober.go:28] interesting pod/downloads-7954f5f757-7pntj container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body=
Jan 26 16:57:11 crc kubenswrapper[4865]: I0126 16:57:11.616612 4865 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7pntj" podUID="9156ce7a-0206-4509-b090-d2e93c83f425" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused"
Jan 26 16:57:11 crc kubenswrapper[4865]: I0126 16:57:11.635248 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-shl5n" podStartSLOduration=127.635221964 podStartE2EDuration="2m7.635221964s" podCreationTimestamp="2026-01-26 16:55:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:57:11.634636797 +0000 UTC m=+159.218522394" watchObservedRunningTime="2026-01-26 16:57:11.635221964 +0000 UTC m=+159.219107551"
Jan 26 16:57:11 crc kubenswrapper[4865]: I0126 16:57:11.657539 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:11 crc kubenswrapper[4865]: I0126 16:57:11.664087 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-kw5g8"
Jan 26 16:57:11 crc kubenswrapper[4865]: E0126 16:57:11.665509 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:12.165493213 +0000 UTC m=+159.749378790 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:11 crc kubenswrapper[4865]: I0126 16:57:11.796164 4865 patch_prober.go:28] interesting pod/router-default-5444994796-gx9t9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 26 16:57:11 crc kubenswrapper[4865]: [-]has-synced failed: reason withheld
Jan 26 16:57:11 crc kubenswrapper[4865]: [+]process-running ok
Jan 26 16:57:11 crc kubenswrapper[4865]: healthz check failed
Jan 26 16:57:11 crc kubenswrapper[4865]: I0126 16:57:11.796593 4865 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gx9t9" podUID="a5eb1e8d-ffa8-4422-aa5d-852074436139" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 26 16:57:11 crc kubenswrapper[4865]: I0126 16:57:11.799851 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:11 crc kubenswrapper[4865]: E0126 16:57:11.800167 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:12.300148306 +0000 UTC m=+159.884033893 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:11 crc kubenswrapper[4865]: I0126 16:57:11.801089 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-mfq2t" podStartSLOduration=127.801079813 podStartE2EDuration="2m7.801079813s" podCreationTimestamp="2026-01-26 16:55:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:57:11.800514106 +0000 UTC m=+159.384399693" watchObservedRunningTime="2026-01-26 16:57:11.801079813 +0000 UTC m=+159.384965400"
Jan 26 16:57:12 crc kubenswrapper[4865]: I0126 16:57:11.903932 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:12 crc kubenswrapper[4865]: E0126 16:57:11.904417 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:12.404400987 +0000 UTC m=+159.988286574 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:12 crc kubenswrapper[4865]: I0126 16:57:12.006826 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:12 crc kubenswrapper[4865]: E0126 16:57:12.007419 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:12.507370641 +0000 UTC m=+160.091256228 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:12 crc kubenswrapper[4865]: I0126 16:57:12.070882 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2027-01-26 16:52:10 +0000 UTC, rotation deadline is 2026-11-08 11:50:07.638767872 +0000 UTC
Jan 26 16:57:12 crc kubenswrapper[4865]: I0126 16:57:12.098877 4865 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 6858h52m55.539924416s for next certificate rotation
Jan 26 16:57:12 crc kubenswrapper[4865]: I0126 16:57:12.117860 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:12 crc kubenswrapper[4865]: E0126 16:57:12.118311 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:12.618295644 +0000 UTC m=+160.202181231 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:12 crc kubenswrapper[4865]: I0126 16:57:12.181027 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-7kt64" podStartSLOduration=128.180983912 podStartE2EDuration="2m8.180983912s" podCreationTimestamp="2026-01-26 16:55:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:57:12.068202967 +0000 UTC m=+159.652088564" watchObservedRunningTime="2026-01-26 16:57:12.180983912 +0000 UTC m=+159.764869499"
Jan 26 16:57:12 crc kubenswrapper[4865]: I0126 16:57:12.383565 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:12 crc kubenswrapper[4865]: E0126 16:57:12.383801 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:12.883763415 +0000 UTC m=+160.467649002 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:12 crc kubenswrapper[4865]: I0126 16:57:12.383921 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:12 crc kubenswrapper[4865]: E0126 16:57:12.388570 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:12.888546022 +0000 UTC m=+160.472431609 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:12 crc kubenswrapper[4865]: I0126 16:57:12.485368 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-5mz4s" podStartSLOduration=128.485345185 podStartE2EDuration="2m8.485345185s" podCreationTimestamp="2026-01-26 16:55:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:57:12.37693812 +0000 UTC m=+159.960823737" watchObservedRunningTime="2026-01-26 16:57:12.485345185 +0000 UTC m=+160.069230772"
Jan 26 16:57:12 crc kubenswrapper[4865]: I0126 16:57:12.486517 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:12 crc kubenswrapper[4865]: E0126 16:57:12.486909 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:12.986882749 +0000 UTC m=+160.570768336 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:12 crc kubenswrapper[4865]: I0126 16:57:12.550637 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-dsngx" podStartSLOduration=129.550611387 podStartE2EDuration="2m9.550611387s" podCreationTimestamp="2026-01-26 16:55:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:57:12.487169297 +0000 UTC m=+160.071054884" watchObservedRunningTime="2026-01-26 16:57:12.550611387 +0000 UTC m=+160.134496974"
Jan 26 16:57:12 crc kubenswrapper[4865]: I0126 16:57:12.553400 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j2f55" podStartSLOduration=128.553390427 podStartE2EDuration="2m8.553390427s" podCreationTimestamp="2026-01-26 16:55:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:57:12.549690551 +0000 UTC m=+160.133576138" watchObservedRunningTime="2026-01-26 16:57:12.553390427 +0000 UTC m=+160.137276004"
Jan 26 16:57:12 crc kubenswrapper[4865]: I0126 16:57:12.599886 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:12 crc kubenswrapper[4865]: E0126 16:57:12.600436 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:13.100418536 +0000 UTC m=+160.684304123 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:12 crc kubenswrapper[4865]: I0126 16:57:12.700420 4865 patch_prober.go:28] interesting pod/router-default-5444994796-gx9t9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 26 16:57:12 crc kubenswrapper[4865]: [-]has-synced failed: reason withheld
Jan 26 16:57:12 crc kubenswrapper[4865]: [+]process-running ok
Jan 26 16:57:12 crc kubenswrapper[4865]: healthz check failed
Jan 26 16:57:12 crc kubenswrapper[4865]: I0126 16:57:12.700504 4865 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gx9t9" podUID="a5eb1e8d-ffa8-4422-aa5d-852074436139" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 26 16:57:12 crc kubenswrapper[4865]: I0126 16:57:12.700978 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:12 crc kubenswrapper[4865]: E0126 16:57:12.701468 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:13.201448885 +0000 UTC m=+160.785334472 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:12 crc kubenswrapper[4865]: I0126 16:57:12.725047 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-vsrcj" event={"ID":"e3cf8ef1-8f1a-4688-8443-600830a81400","Type":"ContainerStarted","Data":"9e4eec0b1de3e50b439a988f27336c9208c119e4b456b1202a7b6044dc391a42"}
Jan 26 16:57:12 crc kubenswrapper[4865]: I0126 16:57:12.732171 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-mdgf6" podStartSLOduration=128.732152636 podStartE2EDuration="2m8.732152636s" podCreationTimestamp="2026-01-26 16:55:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:57:12.645888991 +0000 UTC m=+160.229774578" watchObservedRunningTime="2026-01-26 16:57:12.732152636 +0000 UTC m=+160.316038223"
Jan 26 16:57:12 crc kubenswrapper[4865]: I0126 16:57:12.732726 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4b7rk" podStartSLOduration=129.732720282 podStartE2EDuration="2m9.732720282s" podCreationTimestamp="2026-01-26 16:55:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:57:12.724411144 +0000 UTC m=+160.308296731" watchObservedRunningTime="2026-01-26 16:57:12.732720282 +0000 UTC m=+160.316605869"
Jan 26 16:57:12 crc kubenswrapper[4865]: I0126 16:57:12.752306 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-gtdlb" event={"ID":"40010721-d20f-43ea-966f-5a4a41d60a20","Type":"ContainerStarted","Data":"2722eb90565fce993218cb612d0f5a55244d1d712b9a891936edaedd843e983f"}
Jan 26 16:57:12 crc kubenswrapper[4865]: I0126 16:57:12.753289 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-gtdlb"
Jan 26 16:57:12 crc kubenswrapper[4865]: I0126 16:57:12.763067 4865 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-phb7b container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.38:8080/healthz\": dial tcp 10.217.0.38:8080: connect: connection refused" start-of-body=
Jan 26 16:57:12 crc kubenswrapper[4865]: I0126 16:57:12.763103 4865 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-phb7b" podUID="ce5bf85d-dd65-4174-8fdc-10988986d665" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.38:8080/healthz\": dial tcp 10.217.0.38:8080: connect: connection refused"
Jan 26 16:57:12 crc kubenswrapper[4865]: I0126 16:57:12.763159 4865 patch_prober.go:28] interesting pod/downloads-7954f5f757-7pntj container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body=
Jan 26 16:57:12 crc kubenswrapper[4865]: I0126 16:57:12.763172 4865 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7pntj" podUID="9156ce7a-0206-4509-b090-d2e93c83f425" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused"
Jan 26 16:57:12 crc kubenswrapper[4865]: I0126 16:57:12.819943 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:12 crc kubenswrapper[4865]: E0126 16:57:12.865795 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:13.365779349 +0000 UTC m=+160.949664936 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:12 crc kubenswrapper[4865]: I0126 16:57:12.927745 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:12 crc kubenswrapper[4865]: E0126 16:57:12.928199 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:13.42817859 +0000 UTC m=+161.012064177 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:13 crc kubenswrapper[4865]: I0126 16:57:13.086969 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:13 crc kubenswrapper[4865]: E0126 16:57:13.087455 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:13.587423922 +0000 UTC m=+161.171309509 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:13 crc kubenswrapper[4865]: I0126 16:57:13.256610 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:13 crc kubenswrapper[4865]: E0126 16:57:13.269071 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:13.769030939 +0000 UTC m=+161.352916526 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:13 crc kubenswrapper[4865]: I0126 16:57:13.269846 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:13 crc kubenswrapper[4865]: E0126 16:57:13.270626 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:13.770610984 +0000 UTC m=+161.354496571 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:13 crc kubenswrapper[4865]: I0126 16:57:13.276296 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-gtdlb" podStartSLOduration=12.276186084 podStartE2EDuration="12.276186084s" podCreationTimestamp="2026-01-26 16:57:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:57:12.96513601 +0000 UTC m=+160.549021597" watchObservedRunningTime="2026-01-26 16:57:13.276186084 +0000 UTC m=+160.860071681"
Jan 26 16:57:13 crc kubenswrapper[4865]: I0126 16:57:13.336382 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-vsrcj" podStartSLOduration=129.33634624 podStartE2EDuration="2m9.33634624s" podCreationTimestamp="2026-01-26 16:55:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:57:13.270319116 +0000 UTC m=+160.854204703" watchObservedRunningTime="2026-01-26 16:57:13.33634624 +0000 UTC m=+160.920231827"
Jan 26 16:57:13 crc kubenswrapper[4865]: I0126 16:57:13.370381 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:13 crc kubenswrapper[4865]: E0126 16:57:13.370694 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:13.870669895 +0000 UTC m=+161.454555482 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:13 crc kubenswrapper[4865]: I0126 16:57:13.476091 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:13 crc kubenswrapper[4865]: E0126 16:57:13.476464 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:13.97644649 +0000 UTC m=+161.560332077 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:13 crc kubenswrapper[4865]: I0126 16:57:13.560092 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Jan 26 16:57:13 crc kubenswrapper[4865]: I0126 16:57:13.560752 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 26 16:57:13 crc kubenswrapper[4865]: I0126 16:57:13.579614 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:13 crc kubenswrapper[4865]: E0126 16:57:13.579941 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:14.079898298 +0000 UTC m=+161.663783895 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:13 crc kubenswrapper[4865]: I0126 16:57:13.580114 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:13 crc kubenswrapper[4865]: I0126 16:57:13.580253 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7c335727-ad5b-4e67-ab8d-a97e0d20d310-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"7c335727-ad5b-4e67-ab8d-a97e0d20d310\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 26 16:57:13 crc kubenswrapper[4865]: I0126 16:57:13.580451 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7c335727-ad5b-4e67-ab8d-a97e0d20d310-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"7c335727-ad5b-4e67-ab8d-a97e0d20d310\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 26 16:57:13 crc kubenswrapper[4865]: E0126 16:57:13.581082 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:14.081063771 +0000 UTC m=+161.664949358 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:13 crc kubenswrapper[4865]: I0126 16:57:13.640362 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt"
Jan 26 16:57:13 crc kubenswrapper[4865]: I0126 16:57:13.650462 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n"
Jan 26 16:57:13 crc kubenswrapper[4865]: I0126 16:57:13.662665 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Jan 26 16:57:13 crc kubenswrapper[4865]: I0126 16:57:13.681236 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:13 crc kubenswrapper[4865]: I0126 16:57:13.681462 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7c335727-ad5b-4e67-ab8d-a97e0d20d310-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"7c335727-ad5b-4e67-ab8d-a97e0d20d310\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 26 16:57:13 crc kubenswrapper[4865]: I0126 16:57:13.681521 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7c335727-ad5b-4e67-ab8d-a97e0d20d310-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"7c335727-ad5b-4e67-ab8d-a97e0d20d310\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 26 16:57:13 crc kubenswrapper[4865]: I0126 16:57:13.681592 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7c335727-ad5b-4e67-ab8d-a97e0d20d310-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"7c335727-ad5b-4e67-ab8d-a97e0d20d310\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 26 16:57:13 crc kubenswrapper[4865]: E0126 16:57:13.681665 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:14.181646237 +0000 UTC m=+161.765531824 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:13 crc kubenswrapper[4865]: W0126 16:57:13.691166 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-c41f71ac8d21dcf4396ef29eb275cbb524668a5808cd82b9e8c60865dea745da WatchSource:0}: Error finding container c41f71ac8d21dcf4396ef29eb275cbb524668a5808cd82b9e8c60865dea745da: Status 404 returned error can't find the container with id c41f71ac8d21dcf4396ef29eb275cbb524668a5808cd82b9e8c60865dea745da
Jan 26 16:57:13 crc kubenswrapper[4865]: I0126 16:57:13.691412 4865 patch_prober.go:28] interesting pod/router-default-5444994796-gx9t9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 26 16:57:13 crc kubenswrapper[4865]: [-]has-synced failed: reason withheld
Jan 26 16:57:13 crc kubenswrapper[4865]: [+]process-running ok
Jan 26 16:57:13 crc kubenswrapper[4865]: healthz check failed
Jan 26 16:57:13 crc kubenswrapper[4865]: I0126 16:57:13.691454 4865 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gx9t9" podUID="a5eb1e8d-ffa8-4422-aa5d-852074436139" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 26 16:57:13 crc kubenswrapper[4865]: I0126 16:57:13.738228 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7c335727-ad5b-4e67-ab8d-a97e0d20d310-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"7c335727-ad5b-4e67-ab8d-a97e0d20d310\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 26 16:57:13 crc kubenswrapper[4865]: I0126 16:57:13.791112 4865 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-l6hpd container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.21:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Jan 26 16:57:13 crc kubenswrapper[4865]: I0126 16:57:13.791665 4865 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-l6hpd" podUID="092200be-8013-4057-b586-d321c27dc3fb" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.21:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Jan 26 16:57:13 crc kubenswrapper[4865]: I0126 16:57:13.794470 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:13 crc kubenswrapper[4865]: E0126 16:57:13.795103 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:14.295084762 +0000 UTC m=+161.878970349 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:13 crc kubenswrapper[4865]: I0126 16:57:13.843980 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-6lhmn" event={"ID":"d6c003d6-66db-4a16-86a2-ba0d03cf1a25","Type":"ContainerStarted","Data":"bf54204d0229d90e005dc59f1e452504f544b7a4db8879f0e5da0b71e4834e61"}
Jan 26 16:57:13 crc kubenswrapper[4865]: I0126 16:57:13.896297 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:13 crc kubenswrapper[4865]: E0126 16:57:13.896701 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:14.396675866 +0000 UTC m=+161.980561453 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:14 crc kubenswrapper[4865]: I0126 16:57:13.965440 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Jan 26 16:57:14 crc kubenswrapper[4865]: I0126 16:57:13.971873 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"eb074ff9f8dc30b6e374b677a0a5a1715d0d51cc59286f3672f5f1f605ae8183"}
Jan 26 16:57:14 crc kubenswrapper[4865]: I0126 16:57:13.973521 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"1e720d0e024665434b94529146e73932b7eef7fdb6ac0816fd44b5ea2b68b599"}
Jan 26 16:57:14 crc kubenswrapper[4865]: I0126 16:57:13.977180 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"c41f71ac8d21dcf4396ef29eb275cbb524668a5808cd82b9e8c60865dea745da"}
Jan 26 16:57:14 crc kubenswrapper[4865]: I0126 16:57:13.979445 4865 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-phb7b container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.38:8080/healthz\": dial tcp 10.217.0.38:8080: connect: connection refused" start-of-body=
Jan 26 16:57:14 crc kubenswrapper[4865]: I0126 16:57:13.979507 4865 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-phb7b" podUID="ce5bf85d-dd65-4174-8fdc-10988986d665" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.38:8080/healthz\": dial tcp 10.217.0.38:8080: connect: connection refused"
Jan 26 16:57:14 crc kubenswrapper[4865]: I0126 16:57:14.027425 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:14 crc kubenswrapper[4865]: E0126 16:57:14.029245 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:14.52923243 +0000 UTC m=+162.113118017 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:14 crc kubenswrapper[4865]: I0126 16:57:14.041535 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zkwx7"
Jan 26 16:57:14 crc kubenswrapper[4865]: I0126 16:57:14.041615 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zkwx7"
Jan 26 16:57:14 crc kubenswrapper[4865]: I0126 16:57:14.129155 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:14 crc kubenswrapper[4865]: E0126 16:57:14.130540 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:14.630505945 +0000 UTC m=+162.214391532 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:14 crc kubenswrapper[4865]: I0126 16:57:14.148259 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zkwx7"
Jan 26 16:57:14 crc kubenswrapper[4865]: I0126 16:57:14.150887 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-npnhh"
Jan 26 16:57:14 crc kubenswrapper[4865]: I0126 16:57:14.232404 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:14 crc kubenswrapper[4865]: E0126 16:57:14.234684 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:14.734658732 +0000 UTC m=+162.318544319 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:14 crc kubenswrapper[4865]: I0126 16:57:14.263636 4865 patch_prober.go:28] interesting pod/downloads-7954f5f757-7pntj container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body=
Jan 26 16:57:14 crc kubenswrapper[4865]: I0126 16:57:14.264325 4865 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-7pntj" podUID="9156ce7a-0206-4509-b090-d2e93c83f425" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused"
Jan 26 16:57:14 crc kubenswrapper[4865]: I0126 16:57:14.263958 4865 patch_prober.go:28] interesting pod/downloads-7954f5f757-7pntj container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body=
Jan 26 16:57:14 crc kubenswrapper[4865]: I0126 16:57:14.264424 4865 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7pntj" podUID="9156ce7a-0206-4509-b090-d2e93c83f425" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused"
Jan 26 16:57:14 crc kubenswrapper[4865]: I0126 16:57:14.318845 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-dsngx"
Jan 26 16:57:14 crc kubenswrapper[4865]: I0126 16:57:14.468130 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:14 crc kubenswrapper[4865]: E0126 16:57:14.468871 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:14.968850161 +0000 UTC m=+162.552735748 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:14 crc kubenswrapper[4865]: I0126 16:57:14.468911 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-dsngx"
Jan 26 16:57:14 crc kubenswrapper[4865]: I0126 16:57:14.522739 4865 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-phb7b container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.38:8080/healthz\": dial tcp 10.217.0.38:8080: connect: connection refused" start-of-body=
Jan 26 16:57:14 crc kubenswrapper[4865]: I0126 16:57:14.522825 4865 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-phb7b" podUID="ce5bf85d-dd65-4174-8fdc-10988986d665" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.38:8080/healthz\": dial tcp 10.217.0.38:8080: connect: connection refused"
Jan 26 16:57:14 crc kubenswrapper[4865]: I0126 16:57:14.522837 4865 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-phb7b container/marketplace-operator namespace/openshift-marketplace: Liveness probe status=failure output="Get \"http://10.217.0.38:8080/healthz\": dial tcp 10.217.0.38:8080: connect: connection refused" start-of-body=
Jan 26 16:57:14 crc kubenswrapper[4865]: I0126 16:57:14.522915 4865 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-marketplace/marketplace-operator-79b997595-phb7b" podUID="ce5bf85d-dd65-4174-8fdc-10988986d665" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.38:8080/healthz\": dial tcp 10.217.0.38:8080: connect: connection refused"
Jan 26 16:57:14 crc kubenswrapper[4865]: I0126 16:57:14.536630 4865 patch_prober.go:28] interesting pod/console-f9d7485db-dsngx container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.16:8443/health\": dial tcp 10.217.0.16:8443: connect: connection refused" start-of-body=
Jan 26 16:57:14 crc kubenswrapper[4865]: I0126 16:57:14.536707 4865 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-dsngx" podUID="f8fac562-f66c-433b-9e4a-1a08fe6d78f5" containerName="console" probeResult="failure" output="Get \"https://10.217.0.16:8443/health\": dial tcp 10.217.0.16:8443: connect: connection refused"
Jan 26 16:57:14 crc kubenswrapper[4865]: I0126 16:57:14.573896 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:14 crc kubenswrapper[4865]: E0126 16:57:14.574526 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:15.074491122 +0000 UTC m=+162.658376709 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:14 crc kubenswrapper[4865]: I0126 16:57:14.805435 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:14 crc kubenswrapper[4865]: E0126 16:57:14.806519 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:15.306491459 +0000 UTC m=+162.890377056 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:14 crc kubenswrapper[4865]: I0126 16:57:14.814585 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-gx9t9"
Jan 26 16:57:14 crc kubenswrapper[4865]: I0126 16:57:14.832467 4865 patch_prober.go:28] interesting pod/router-default-5444994796-gx9t9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 26 16:57:14 crc kubenswrapper[4865]: [-]has-synced failed: reason withheld
Jan 26 16:57:14 crc kubenswrapper[4865]: [+]process-running ok
Jan 26 16:57:14 crc kubenswrapper[4865]: healthz check failed
Jan 26 16:57:14 crc kubenswrapper[4865]: I0126 16:57:14.832542 4865 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gx9t9" podUID="a5eb1e8d-ffa8-4422-aa5d-852074436139" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 26 16:57:14 crc kubenswrapper[4865]: I0126 16:57:14.908111 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:14 crc kubenswrapper[4865]: E0126 16:57:14.909760 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed.
No retries permitted until 2026-01-26 16:57:15.409730781 +0000 UTC m=+162.993616528 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.009860 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:57:15 crc kubenswrapper[4865]: E0126 16:57:15.010381 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:15.510316587 +0000 UTC m=+163.094202174 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.040511 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"37da0cd907619372d6068deaa79a100ce4f2402cdbe3b7506f97a0407aac95e5"} Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.040575 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.056029 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"3e1ac8d7061f4d889b0809d96ce0ca84021707c50484aa313bdacc2f4a384289"} Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.071593 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"ec6e2add640c1302c53cf04850a2a9ea463ceb083d29517a2e46d7ac66b69719"} Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.079185 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-6lhmn" event={"ID":"d6c003d6-66db-4a16-86a2-ba0d03cf1a25","Type":"ContainerStarted","Data":"1efef7fb492c778181faf37e073f2ccfe278ff6fd72d56fa31ce6ec3fe28949f"} Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.113650 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.120450 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zkwx7" Jan 26 16:57:15 crc kubenswrapper[4865]: E0126 16:57:15.126538 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:15.62651588 +0000 UTC m=+163.210401467 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.130270 4865 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-l6hpd container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.21:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.132235 4865 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-l6hpd" podUID="092200be-8013-4057-b586-d321c27dc3fb" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.21:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.132414 4865 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-l6hpd container/packageserver namespace/openshift-operator-lifecycle-manager: Liveness probe status=failure output="Get \"https://10.217.0.21:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.132509 4865 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-l6hpd" podUID="092200be-8013-4057-b586-d321c27dc3fb" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.21:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.207289 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-7x4jl"] Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.208626 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-7x4jl" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.215207 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:57:15 crc kubenswrapper[4865]: E0126 16:57:15.215722 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:15.715695738 +0000 UTC m=+163.299581325 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.234801 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.297081 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7x4jl"] Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.364812 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-6vslt"] Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.365979 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-gw6ld"] Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.366647 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-fkmhm"] Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.367534 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fkmhm" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.368517 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6vslt" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.368671 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-gw6ld" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.375327 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6vslt"] Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.376729 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28d973df-adc6-41b0-81b9-afd3a743641f-catalog-content\") pod \"community-operators-7x4jl\" (UID: \"28d973df-adc6-41b0-81b9-afd3a743641f\") " pod="openshift-marketplace/community-operators-7x4jl" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.376797 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.376829 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/203f4245-9105-442f-a78b-dc354926516b-utilities\") pod \"community-operators-6vslt\" (UID: \"203f4245-9105-442f-a78b-dc354926516b\") " pod="openshift-marketplace/community-operators-6vslt" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.376847 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z6lkv\" (UniqueName: \"kubernetes.io/projected/28d973df-adc6-41b0-81b9-afd3a743641f-kube-api-access-z6lkv\") pod \"community-operators-7x4jl\" (UID: \"28d973df-adc6-41b0-81b9-afd3a743641f\") " pod="openshift-marketplace/community-operators-7x4jl" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.376871 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf-catalog-content\") pod \"certified-operators-fkmhm\" (UID: \"ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf\") " pod="openshift-marketplace/certified-operators-fkmhm" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.376907 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28d973df-adc6-41b0-81b9-afd3a743641f-utilities\") pod \"community-operators-7x4jl\" (UID: \"28d973df-adc6-41b0-81b9-afd3a743641f\") " pod="openshift-marketplace/community-operators-7x4jl" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.376921 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-56lwr\" (UniqueName: \"kubernetes.io/projected/1fa30c32-b5b9-48db-baf7-761da95213f7-kube-api-access-56lwr\") pod \"certified-operators-gw6ld\" (UID: \"1fa30c32-b5b9-48db-baf7-761da95213f7\") " pod="openshift-marketplace/certified-operators-gw6ld" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.376952 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf-utilities\") pod \"certified-operators-fkmhm\" (UID: \"ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf\") " 
pod="openshift-marketplace/certified-operators-fkmhm" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.376975 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1fa30c32-b5b9-48db-baf7-761da95213f7-utilities\") pod \"certified-operators-gw6ld\" (UID: \"1fa30c32-b5b9-48db-baf7-761da95213f7\") " pod="openshift-marketplace/certified-operators-gw6ld" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.377017 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pk767\" (UniqueName: \"kubernetes.io/projected/203f4245-9105-442f-a78b-dc354926516b-kube-api-access-pk767\") pod \"community-operators-6vslt\" (UID: \"203f4245-9105-442f-a78b-dc354926516b\") " pod="openshift-marketplace/community-operators-6vslt" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.377042 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/203f4245-9105-442f-a78b-dc354926516b-catalog-content\") pod \"community-operators-6vslt\" (UID: \"203f4245-9105-442f-a78b-dc354926516b\") " pod="openshift-marketplace/community-operators-6vslt" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.377068 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7nhjm\" (UniqueName: \"kubernetes.io/projected/ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf-kube-api-access-7nhjm\") pod \"certified-operators-fkmhm\" (UID: \"ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf\") " pod="openshift-marketplace/certified-operators-fkmhm" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.377102 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1fa30c32-b5b9-48db-baf7-761da95213f7-catalog-content\") pod \"certified-operators-gw6ld\" (UID: \"1fa30c32-b5b9-48db-baf7-761da95213f7\") " pod="openshift-marketplace/certified-operators-gw6ld" Jan 26 16:57:15 crc kubenswrapper[4865]: E0126 16:57:15.377433 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:15.877422158 +0000 UTC m=+163.461307745 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.451775 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.477933 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.478095 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7nhjm\" (UniqueName: \"kubernetes.io/projected/ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf-kube-api-access-7nhjm\") pod \"certified-operators-fkmhm\" (UID: \"ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf\") " pod="openshift-marketplace/certified-operators-fkmhm" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.478123 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1fa30c32-b5b9-48db-baf7-761da95213f7-catalog-content\") pod \"certified-operators-gw6ld\" (UID: \"1fa30c32-b5b9-48db-baf7-761da95213f7\") " pod="openshift-marketplace/certified-operators-gw6ld" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.478153 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28d973df-adc6-41b0-81b9-afd3a743641f-catalog-content\") pod \"community-operators-7x4jl\" (UID: \"28d973df-adc6-41b0-81b9-afd3a743641f\") " pod="openshift-marketplace/community-operators-7x4jl" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.478170 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/203f4245-9105-442f-a78b-dc354926516b-utilities\") pod \"community-operators-6vslt\" (UID: \"203f4245-9105-442f-a78b-dc354926516b\") " pod="openshift-marketplace/community-operators-6vslt" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.478197 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z6lkv\" (UniqueName: \"kubernetes.io/projected/28d973df-adc6-41b0-81b9-afd3a743641f-kube-api-access-z6lkv\") pod \"community-operators-7x4jl\" (UID: \"28d973df-adc6-41b0-81b9-afd3a743641f\") " pod="openshift-marketplace/community-operators-7x4jl" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.478221 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf-catalog-content\") pod \"certified-operators-fkmhm\" (UID: \"ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf\") " pod="openshift-marketplace/certified-operators-fkmhm" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.478257 4865 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28d973df-adc6-41b0-81b9-afd3a743641f-utilities\") pod \"community-operators-7x4jl\" (UID: \"28d973df-adc6-41b0-81b9-afd3a743641f\") " pod="openshift-marketplace/community-operators-7x4jl" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.478279 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-56lwr\" (UniqueName: \"kubernetes.io/projected/1fa30c32-b5b9-48db-baf7-761da95213f7-kube-api-access-56lwr\") pod \"certified-operators-gw6ld\" (UID: \"1fa30c32-b5b9-48db-baf7-761da95213f7\") " pod="openshift-marketplace/certified-operators-gw6ld" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.478294 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf-utilities\") pod \"certified-operators-fkmhm\" (UID: \"ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf\") " pod="openshift-marketplace/certified-operators-fkmhm" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.478329 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1fa30c32-b5b9-48db-baf7-761da95213f7-utilities\") pod \"certified-operators-gw6ld\" (UID: \"1fa30c32-b5b9-48db-baf7-761da95213f7\") " pod="openshift-marketplace/certified-operators-gw6ld" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.478365 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pk767\" (UniqueName: \"kubernetes.io/projected/203f4245-9105-442f-a78b-dc354926516b-kube-api-access-pk767\") pod \"community-operators-6vslt\" (UID: \"203f4245-9105-442f-a78b-dc354926516b\") " pod="openshift-marketplace/community-operators-6vslt" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.478391 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/203f4245-9105-442f-a78b-dc354926516b-catalog-content\") pod \"community-operators-6vslt\" (UID: \"203f4245-9105-442f-a78b-dc354926516b\") " pod="openshift-marketplace/community-operators-6vslt" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.479046 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/203f4245-9105-442f-a78b-dc354926516b-catalog-content\") pod \"community-operators-6vslt\" (UID: \"203f4245-9105-442f-a78b-dc354926516b\") " pod="openshift-marketplace/community-operators-6vslt" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.479642 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf-catalog-content\") pod \"certified-operators-fkmhm\" (UID: \"ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf\") " pod="openshift-marketplace/certified-operators-fkmhm" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.479793 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28d973df-adc6-41b0-81b9-afd3a743641f-catalog-content\") pod \"community-operators-7x4jl\" (UID: \"28d973df-adc6-41b0-81b9-afd3a743641f\") " pod="openshift-marketplace/community-operators-7x4jl" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.480100 4865 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/203f4245-9105-442f-a78b-dc354926516b-utilities\") pod \"community-operators-6vslt\" (UID: \"203f4245-9105-442f-a78b-dc354926516b\") " pod="openshift-marketplace/community-operators-6vslt" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.480390 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf-utilities\") pod \"certified-operators-fkmhm\" (UID: \"ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf\") " pod="openshift-marketplace/certified-operators-fkmhm" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.485434 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-6lhmn" podStartSLOduration=132.485407336 podStartE2EDuration="2m12.485407336s" podCreationTimestamp="2026-01-26 16:55:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:57:15.452254965 +0000 UTC m=+163.036140562" watchObservedRunningTime="2026-01-26 16:57:15.485407336 +0000 UTC m=+163.069292923" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.488772 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1fa30c32-b5b9-48db-baf7-761da95213f7-utilities\") pod \"certified-operators-gw6ld\" (UID: \"1fa30c32-b5b9-48db-baf7-761da95213f7\") " pod="openshift-marketplace/certified-operators-gw6ld" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.489329 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28d973df-adc6-41b0-81b9-afd3a743641f-utilities\") pod \"community-operators-7x4jl\" (UID: \"28d973df-adc6-41b0-81b9-afd3a743641f\") " pod="openshift-marketplace/community-operators-7x4jl" Jan 26 16:57:15 crc kubenswrapper[4865]: E0126 16:57:15.489493 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:15.989466973 +0000 UTC m=+163.573352560 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.489768 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gw6ld"] Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.490544 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1fa30c32-b5b9-48db-baf7-761da95213f7-catalog-content\") pod \"certified-operators-gw6ld\" (UID: \"1fa30c32-b5b9-48db-baf7-761da95213f7\") " pod="openshift-marketplace/certified-operators-gw6ld" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.506443 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fkmhm"] Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.579801 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:15 crc kubenswrapper[4865]: E0126 16:57:15.580193 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:16.080180326 +0000 UTC m=+163.664065913 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.608315 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z6lkv\" (UniqueName: \"kubernetes.io/projected/28d973df-adc6-41b0-81b9-afd3a743641f-kube-api-access-z6lkv\") pod \"community-operators-7x4jl\" (UID: \"28d973df-adc6-41b0-81b9-afd3a743641f\") " pod="openshift-marketplace/community-operators-7x4jl" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.608326 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pk767\" (UniqueName: \"kubernetes.io/projected/203f4245-9105-442f-a78b-dc354926516b-kube-api-access-pk767\") pod \"community-operators-6vslt\" (UID: \"203f4245-9105-442f-a78b-dc354926516b\") " pod="openshift-marketplace/community-operators-6vslt" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.639116 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7nhjm\" (UniqueName: \"kubernetes.io/projected/ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf-kube-api-access-7nhjm\") pod \"certified-operators-fkmhm\" (UID: \"ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf\") " pod="openshift-marketplace/certified-operators-fkmhm" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.654206 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-56lwr\" (UniqueName: \"kubernetes.io/projected/1fa30c32-b5b9-48db-baf7-761da95213f7-kube-api-access-56lwr\") pod \"certified-operators-gw6ld\" (UID: \"1fa30c32-b5b9-48db-baf7-761da95213f7\") " pod="openshift-marketplace/certified-operators-gw6ld" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.698967 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:57:15 crc kubenswrapper[4865]: E0126 16:57:15.703568 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:16.203546345 +0000 UTC m=+163.787431922 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.714636 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.745945 4865 patch_prober.go:28] interesting pod/router-default-5444994796-gx9t9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 26 16:57:15 crc kubenswrapper[4865]: [-]has-synced failed: reason withheld Jan 26 16:57:15 crc kubenswrapper[4865]: [+]process-running ok Jan 26 16:57:15 crc kubenswrapper[4865]: healthz check failed Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.746034 4865 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gx9t9" podUID="a5eb1e8d-ffa8-4422-aa5d-852074436139" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.802377 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fkmhm" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.824603 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:15 crc kubenswrapper[4865]: E0126 16:57:15.835437 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:16.335409418 +0000 UTC m=+163.919295005 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.836075 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6vslt" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.839571 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7x4jl" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.872120 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-gw6ld" Jan 26 16:57:15 crc kubenswrapper[4865]: I0126 16:57:15.951621 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:57:15 crc kubenswrapper[4865]: E0126 16:57:15.952312 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:16.452292262 +0000 UTC m=+164.036177849 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:16 crc kubenswrapper[4865]: I0126 16:57:16.055940 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:16 crc kubenswrapper[4865]: E0126 16:57:16.056342 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:16.556328567 +0000 UTC m=+164.140214154 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:16 crc kubenswrapper[4865]: I0126 16:57:16.157507 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:57:16 crc kubenswrapper[4865]: E0126 16:57:16.157977 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:16.657956913 +0000 UTC m=+164.241842500 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:16 crc kubenswrapper[4865]: I0126 16:57:16.218394 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"7c335727-ad5b-4e67-ab8d-a97e0d20d310","Type":"ContainerStarted","Data":"c8e3747a5257ba102eee4a75f4fd8323a2eb8dac7c8dd14f49b0283f3e8fc9fe"} Jan 26 16:57:16 crc kubenswrapper[4865]: I0126 16:57:16.260831 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:16 crc kubenswrapper[4865]: E0126 16:57:16.261291 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:16.761268546 +0000 UTC m=+164.345154133 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:16 crc kubenswrapper[4865]: I0126 16:57:16.316479 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-wx8d6" event={"ID":"e8e3ebba-3ac5-4cee-8fa8-273a1f25eaf2","Type":"ContainerStarted","Data":"b3c6ab1b0728460927cf31a93d08aec6fcace722d18582afe3b02d3abab6dd15"} Jan 26 16:57:16 crc kubenswrapper[4865]: I0126 16:57:16.363148 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:57:16 crc kubenswrapper[4865]: E0126 16:57:16.364599 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:16.864577731 +0000 UTC m=+164.448463318 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:16 crc kubenswrapper[4865]: I0126 16:57:16.471257 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:16 crc kubenswrapper[4865]: E0126 16:57:16.473740 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:16.973720292 +0000 UTC m=+164.557605879 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:16 crc kubenswrapper[4865]: I0126 16:57:16.587538 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:57:16 crc kubenswrapper[4865]: E0126 16:57:16.587944 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:17.087927919 +0000 UTC m=+164.671813506 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:16 crc kubenswrapper[4865]: I0126 16:57:16.611070 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-r9jxm"] Jan 26 16:57:16 crc kubenswrapper[4865]: I0126 16:57:16.612182 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-r9jxm"] Jan 26 16:57:16 crc kubenswrapper[4865]: I0126 16:57:16.612277 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-r9jxm" Jan 26 16:57:16 crc kubenswrapper[4865]: I0126 16:57:16.619896 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 26 16:57:16 crc kubenswrapper[4865]: I0126 16:57:16.702423 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:16 crc kubenswrapper[4865]: I0126 16:57:16.702544 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7337688e-9bf5-436c-be77-afad7fa093ed-catalog-content\") pod \"redhat-marketplace-r9jxm\" (UID: \"7337688e-9bf5-436c-be77-afad7fa093ed\") " pod="openshift-marketplace/redhat-marketplace-r9jxm" Jan 26 16:57:16 crc kubenswrapper[4865]: I0126 16:57:16.702591 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7337688e-9bf5-436c-be77-afad7fa093ed-utilities\") pod \"redhat-marketplace-r9jxm\" (UID: \"7337688e-9bf5-436c-be77-afad7fa093ed\") " pod="openshift-marketplace/redhat-marketplace-r9jxm" Jan 26 16:57:16 crc kubenswrapper[4865]: I0126 16:57:16.702614 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k587s\" (UniqueName: \"kubernetes.io/projected/7337688e-9bf5-436c-be77-afad7fa093ed-kube-api-access-k587s\") pod \"redhat-marketplace-r9jxm\" (UID: \"7337688e-9bf5-436c-be77-afad7fa093ed\") " pod="openshift-marketplace/redhat-marketplace-r9jxm" Jan 26 16:57:16 crc kubenswrapper[4865]: E0126 16:57:16.702989 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:17.202975799 +0000 UTC m=+164.786861386 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 16:57:16 crc kubenswrapper[4865]: I0126 16:57:16.725220 4865 patch_prober.go:28] interesting pod/router-default-5444994796-gx9t9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 26 16:57:16 crc kubenswrapper[4865]: [-]has-synced failed: reason withheld Jan 26 16:57:16 crc kubenswrapper[4865]: [+]process-running ok Jan 26 16:57:16 crc kubenswrapper[4865]: healthz check failed Jan 26 16:57:16 crc kubenswrapper[4865]: I0126 16:57:16.725285 4865 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gx9t9" podUID="a5eb1e8d-ffa8-4422-aa5d-852074436139" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 26 16:57:16 crc kubenswrapper[4865]: I0126 16:57:16.803200 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 16:57:16 crc kubenswrapper[4865]: I0126 16:57:16.803421 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7337688e-9bf5-436c-be77-afad7fa093ed-catalog-content\") pod \"redhat-marketplace-r9jxm\" (UID: \"7337688e-9bf5-436c-be77-afad7fa093ed\") " pod="openshift-marketplace/redhat-marketplace-r9jxm" Jan 26 16:57:16 crc kubenswrapper[4865]: I0126 16:57:16.803459 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7337688e-9bf5-436c-be77-afad7fa093ed-utilities\") pod \"redhat-marketplace-r9jxm\" (UID: \"7337688e-9bf5-436c-be77-afad7fa093ed\") " pod="openshift-marketplace/redhat-marketplace-r9jxm" Jan 26 16:57:16 crc kubenswrapper[4865]: I0126 16:57:16.803475 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k587s\" (UniqueName: \"kubernetes.io/projected/7337688e-9bf5-436c-be77-afad7fa093ed-kube-api-access-k587s\") pod \"redhat-marketplace-r9jxm\" (UID: \"7337688e-9bf5-436c-be77-afad7fa093ed\") " pod="openshift-marketplace/redhat-marketplace-r9jxm" Jan 26 16:57:16 crc kubenswrapper[4865]: E0126 16:57:16.803984 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:17.303970587 +0000 UTC m=+164.887856174 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:16 crc kubenswrapper[4865]: I0126 16:57:16.805172 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-d5vtl"]
Jan 26 16:57:16 crc kubenswrapper[4865]: I0126 16:57:16.806246 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d5vtl"
Jan 26 16:57:16 crc kubenswrapper[4865]: I0126 16:57:16.807303 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7337688e-9bf5-436c-be77-afad7fa093ed-utilities\") pod \"redhat-marketplace-r9jxm\" (UID: \"7337688e-9bf5-436c-be77-afad7fa093ed\") " pod="openshift-marketplace/redhat-marketplace-r9jxm"
Jan 26 16:57:16 crc kubenswrapper[4865]: I0126 16:57:16.808087 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7337688e-9bf5-436c-be77-afad7fa093ed-catalog-content\") pod \"redhat-marketplace-r9jxm\" (UID: \"7337688e-9bf5-436c-be77-afad7fa093ed\") " pod="openshift-marketplace/redhat-marketplace-r9jxm"
Jan 26 16:57:16 crc kubenswrapper[4865]: I0126 16:57:16.873969 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k587s\" (UniqueName: \"kubernetes.io/projected/7337688e-9bf5-436c-be77-afad7fa093ed-kube-api-access-k587s\") pod \"redhat-marketplace-r9jxm\" (UID: \"7337688e-9bf5-436c-be77-afad7fa093ed\") " pod="openshift-marketplace/redhat-marketplace-r9jxm"
Jan 26 16:57:16 crc kubenswrapper[4865]: I0126 16:57:16.897730 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-d5vtl"]
Jan 26 16:57:16 crc kubenswrapper[4865]: I0126 16:57:16.904350 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/109ec94e-2b65-4cc6-a53c-3f874401fd5e-utilities\") pod \"redhat-marketplace-d5vtl\" (UID: \"109ec94e-2b65-4cc6-a53c-3f874401fd5e\") " pod="openshift-marketplace/redhat-marketplace-d5vtl"
Jan 26 16:57:16 crc kubenswrapper[4865]: I0126 16:57:16.904400 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/109ec94e-2b65-4cc6-a53c-3f874401fd5e-catalog-content\") pod \"redhat-marketplace-d5vtl\" (UID: \"109ec94e-2b65-4cc6-a53c-3f874401fd5e\") " pod="openshift-marketplace/redhat-marketplace-d5vtl"
Jan 26 16:57:16 crc kubenswrapper[4865]: I0126 16:57:16.904441 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:16 crc kubenswrapper[4865]: I0126 16:57:16.904498 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r77gt\" (UniqueName: \"kubernetes.io/projected/109ec94e-2b65-4cc6-a53c-3f874401fd5e-kube-api-access-r77gt\") pod \"redhat-marketplace-d5vtl\" (UID: \"109ec94e-2b65-4cc6-a53c-3f874401fd5e\") " pod="openshift-marketplace/redhat-marketplace-d5vtl"
Jan 26 16:57:16 crc kubenswrapper[4865]: E0126 16:57:16.904795 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:17.404784229 +0000 UTC m=+164.988669816 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:16 crc kubenswrapper[4865]: I0126 16:57:16.949521 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-r9jxm"
Jan 26 16:57:17 crc kubenswrapper[4865]: I0126 16:57:17.040372 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:17 crc kubenswrapper[4865]: E0126 16:57:17.040665 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:17.540623567 +0000 UTC m=+165.124509154 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:17 crc kubenswrapper[4865]: I0126 16:57:17.040915 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r77gt\" (UniqueName: \"kubernetes.io/projected/109ec94e-2b65-4cc6-a53c-3f874401fd5e-kube-api-access-r77gt\") pod \"redhat-marketplace-d5vtl\" (UID: \"109ec94e-2b65-4cc6-a53c-3f874401fd5e\") " pod="openshift-marketplace/redhat-marketplace-d5vtl"
Jan 26 16:57:17 crc kubenswrapper[4865]: I0126 16:57:17.041059 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/109ec94e-2b65-4cc6-a53c-3f874401fd5e-utilities\") pod \"redhat-marketplace-d5vtl\" (UID: \"109ec94e-2b65-4cc6-a53c-3f874401fd5e\") " pod="openshift-marketplace/redhat-marketplace-d5vtl"
Jan 26 16:57:17 crc kubenswrapper[4865]: I0126 16:57:17.041082 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/109ec94e-2b65-4cc6-a53c-3f874401fd5e-catalog-content\") pod \"redhat-marketplace-d5vtl\" (UID: \"109ec94e-2b65-4cc6-a53c-3f874401fd5e\") " pod="openshift-marketplace/redhat-marketplace-d5vtl"
Jan 26 16:57:17 crc kubenswrapper[4865]: I0126 16:57:17.041173 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:17 crc kubenswrapper[4865]: E0126 16:57:17.041768 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:17.541754169 +0000 UTC m=+165.125639756 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:17 crc kubenswrapper[4865]: I0126 16:57:17.043209 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/109ec94e-2b65-4cc6-a53c-3f874401fd5e-utilities\") pod \"redhat-marketplace-d5vtl\" (UID: \"109ec94e-2b65-4cc6-a53c-3f874401fd5e\") " pod="openshift-marketplace/redhat-marketplace-d5vtl"
Jan 26 16:57:17 crc kubenswrapper[4865]: I0126 16:57:17.043527 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/109ec94e-2b65-4cc6-a53c-3f874401fd5e-catalog-content\") pod \"redhat-marketplace-d5vtl\" (UID: \"109ec94e-2b65-4cc6-a53c-3f874401fd5e\") " pod="openshift-marketplace/redhat-marketplace-d5vtl"
Jan 26 16:57:17 crc kubenswrapper[4865]: I0126 16:57:17.138113 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r77gt\" (UniqueName: \"kubernetes.io/projected/109ec94e-2b65-4cc6-a53c-3f874401fd5e-kube-api-access-r77gt\") pod \"redhat-marketplace-d5vtl\" (UID: \"109ec94e-2b65-4cc6-a53c-3f874401fd5e\") " pod="openshift-marketplace/redhat-marketplace-d5vtl"
Jan 26 16:57:17 crc kubenswrapper[4865]: I0126 16:57:17.156744 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:17 crc kubenswrapper[4865]: E0126 16:57:17.157813 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:17.657599363 +0000 UTC m=+165.241484950 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:17 crc kubenswrapper[4865]: I0126 16:57:17.158068 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d5vtl"
Jan 26 16:57:17 crc kubenswrapper[4865]: I0126 16:57:17.162074 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
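The block above shows kubelet's per-volume retry gate at work: every MountDevice and TearDownAt attempt against the not-yet-registered kubevirt.io.hostpath-provisioner driver fails, and nestedpendingoperations.go then refuses further retries for the durationBeforeRetry printed in each error. A minimal sketch of that gating pattern follows; the type and function names are illustrative stand-ins, not kubelet's actual types, and the doubling policy and cap are assumptions (this excerpt only ever shows the 500ms floor):

    package main

    import (
    	"fmt"
    	"time"
    )

    // retryGate is an illustrative stand-in for kubelet's per-operation
    // backoff in nestedpendingoperations.go. Assumed policy: start at
    // 500ms and double on each consecutive failure, up to a cap.
    type retryGate struct {
    	lastError time.Time
    	delay     time.Duration
    }

    const (
    	initialDelay = 500 * time.Millisecond
    	maxDelay     = 2 * time.Minute
    )

    func (g *retryGate) recordError(now time.Time) {
    	switch {
    	case g.delay == 0:
    		g.delay = initialDelay
    	case g.delay < maxDelay:
    		g.delay *= 2
    	}
    	g.lastError = now
    }

    // permitted reports whether a new attempt may start; while it is
    // false, kubelet logs "No retries permitted until <lastError+delay>".
    func (g *retryGate) permitted(now time.Time) bool {
    	return now.After(g.lastError.Add(g.delay))
    }

    func main() {
    	var g retryGate
    	now := time.Now()
    	g.recordError(now)
    	fmt.Printf("No retries permitted until %s (durationBeforeRetry %s)\n",
    		now.Add(g.delay).Format("2006-01-02 15:04:05.000000000 -0700 MST"), g.delay)
    }

The reconciler re-queues the mount/unmount every sync pass, so the same pair of errors recurs at roughly the retry interval until the driver registers.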
Jan 26 16:57:17 crc kubenswrapper[4865]: I0126 16:57:17.172018 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Jan 26 16:57:17 crc kubenswrapper[4865]: I0126 16:57:17.187453 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt"
Jan 26 16:57:17 crc kubenswrapper[4865]: I0126 16:57:17.188089 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n"
Jan 26 16:57:17 crc kubenswrapper[4865]: I0126 16:57:17.212379 4865 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock"
Jan 26 16:57:17 crc kubenswrapper[4865]: I0126 16:57:17.219483 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Jan 26 16:57:17 crc kubenswrapper[4865]: I0126 16:57:17.274366 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4184b2be-7b37-40a8-b2d0-9b91d5daf5a8-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"4184b2be-7b37-40a8-b2d0-9b91d5daf5a8\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Jan 26 16:57:17 crc kubenswrapper[4865]: I0126 16:57:17.274425 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:17 crc kubenswrapper[4865]: I0126 16:57:17.274503 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4184b2be-7b37-40a8-b2d0-9b91d5daf5a8-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"4184b2be-7b37-40a8-b2d0-9b91d5daf5a8\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Jan 26 16:57:17 crc kubenswrapper[4865]: E0126 16:57:17.274957 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:17.774941059 +0000 UTC m=+165.358826646 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:17 crc kubenswrapper[4865]: I0126 16:57:17.389441 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:17 crc kubenswrapper[4865]: I0126 16:57:17.390190 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4184b2be-7b37-40a8-b2d0-9b91d5daf5a8-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"4184b2be-7b37-40a8-b2d0-9b91d5daf5a8\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Jan 26 16:57:17 crc kubenswrapper[4865]: I0126 16:57:17.390231 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4184b2be-7b37-40a8-b2d0-9b91d5daf5a8-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"4184b2be-7b37-40a8-b2d0-9b91d5daf5a8\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Jan 26 16:57:17 crc kubenswrapper[4865]: I0126 16:57:17.390888 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4184b2be-7b37-40a8-b2d0-9b91d5daf5a8-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"4184b2be-7b37-40a8-b2d0-9b91d5daf5a8\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Jan 26 16:57:17 crc kubenswrapper[4865]: E0126 16:57:17.391119 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:17.890971698 +0000 UTC m=+165.474857285 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:17 crc kubenswrapper[4865]: I0126 16:57:17.394073 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"7c335727-ad5b-4e67-ab8d-a97e0d20d310","Type":"ContainerStarted","Data":"e09a08331abd7a20f214b45ff4a018e880b189be01b32d129b63874c22afa836"}
Jan 26 16:57:17 crc kubenswrapper[4865]: I0126 16:57:17.474142 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4184b2be-7b37-40a8-b2d0-9b91d5daf5a8-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"4184b2be-7b37-40a8-b2d0-9b91d5daf5a8\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Jan 26 16:57:17 crc kubenswrapper[4865]: I0126 16:57:17.491143 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=4.491123841 podStartE2EDuration="4.491123841s" podCreationTimestamp="2026-01-26 16:57:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:57:17.488535997 +0000 UTC m=+165.072421584" watchObservedRunningTime="2026-01-26 16:57:17.491123841 +0000 UTC m=+165.075009428"
Jan 26 16:57:17 crc kubenswrapper[4865]: I0126 16:57:17.493599 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:17 crc kubenswrapper[4865]: E0126 16:57:17.493937 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:17.993925452 +0000 UTC m=+165.577811039 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:17 crc kubenswrapper[4865]: I0126 16:57:17.540215 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Jan 26 16:57:17 crc kubenswrapper[4865]: I0126 16:57:17.594847 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:17 crc kubenswrapper[4865]: E0126 16:57:17.596015 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:18.09598357 +0000 UTC m=+165.679869157 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:17 crc kubenswrapper[4865]: I0126 16:57:17.611648 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fkmhm"]
Jan 26 16:57:17 crc kubenswrapper[4865]: I0126 16:57:17.684424 4865 patch_prober.go:28] interesting pod/router-default-5444994796-gx9t9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 26 16:57:17 crc kubenswrapper[4865]: [-]has-synced failed: reason withheld
Jan 26 16:57:17 crc kubenswrapper[4865]: [+]process-running ok
Jan 26 16:57:17 crc kubenswrapper[4865]: healthz check failed
Jan 26 16:57:17 crc kubenswrapper[4865]: I0126 16:57:17.684486 4865 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gx9t9" podUID="a5eb1e8d-ffa8-4422-aa5d-852074436139" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 26 16:57:17 crc kubenswrapper[4865]: I0126 16:57:17.708899 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
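The router failures above record kubelet's HTTP startup probe: the router's health endpoint answers 500 while its backends have not synced, and kubelet echoes the start of the response body (the [-]/[+] check lines) into the log. The success rule is simply "2xx or 3xx passes". A condensed sketch of that rule; probeHTTP and the endpoint URL are illustrative assumptions, not kubelet's prober API:

    package main

    import (
    	"fmt"
    	"io"
    	"net/http"
    )

    // probeHTTP mimics kubelet's HTTP-probe verdict: 2xx/3xx is success,
    // anything else (here the router's 500) is failure, and a truncated
    // start-of-body is kept for the log line.
    func probeHTTP(url string) (healthy bool, startOfBody string, err error) {
    	resp, err := http.Get(url)
    	if err != nil {
    		return false, "", err
    	}
    	defer resp.Body.Close()
    	body, _ := io.ReadAll(io.LimitReader(resp.Body, 1024))
    	return resp.StatusCode >= 200 && resp.StatusCode < 400, string(body), nil
    }

    func main() {
    	// Hypothetical endpoint; the [-]/[+] lines in the log are the
    	// router health handler's response body.
    	ok, body, err := probeHTTP("http://127.0.0.1:1936/healthz")
    	fmt.Println(ok, err)
    	fmt.Println(body)
    }

Because this is a startup probe, repeated failures delay readiness rather than restarting the container until the failure threshold is exhausted.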
Jan 26 16:57:17 crc kubenswrapper[4865]: E0126 16:57:17.709383 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:18.209371463 +0000 UTC m=+165.793257040 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:17 crc kubenswrapper[4865]: I0126 16:57:17.789746 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-hxg9k"]
Jan 26 16:57:17 crc kubenswrapper[4865]: I0126 16:57:17.791148 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hxg9k"
Jan 26 16:57:17 crc kubenswrapper[4865]: I0126 16:57:17.801688 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Jan 26 16:57:17 crc kubenswrapper[4865]: I0126 16:57:17.813146 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hxg9k"]
Jan 26 16:57:17 crc kubenswrapper[4865]: I0126 16:57:17.813564 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:17 crc kubenswrapper[4865]: E0126 16:57:17.813726 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:18.313691905 +0000 UTC m=+165.897577492 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:17 crc kubenswrapper[4865]: I0126 16:57:17.813795 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:17 crc kubenswrapper[4865]: E0126 16:57:17.814494 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:18.314483638 +0000 UTC m=+165.898369225 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:17 crc kubenswrapper[4865]: I0126 16:57:17.914763 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:17 crc kubenswrapper[4865]: I0126 16:57:17.915487 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea398908-abb3-4ce8-bbaf-a44b6350314d-utilities\") pod \"redhat-operators-hxg9k\" (UID: \"ea398908-abb3-4ce8-bbaf-a44b6350314d\") " pod="openshift-marketplace/redhat-operators-hxg9k"
Jan 26 16:57:17 crc kubenswrapper[4865]: I0126 16:57:17.915531 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea398908-abb3-4ce8-bbaf-a44b6350314d-catalog-content\") pod \"redhat-operators-hxg9k\" (UID: \"ea398908-abb3-4ce8-bbaf-a44b6350314d\") " pod="openshift-marketplace/redhat-operators-hxg9k"
Jan 26 16:57:17 crc kubenswrapper[4865]: I0126 16:57:17.915590 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t5mqf\" (UniqueName: \"kubernetes.io/projected/ea398908-abb3-4ce8-bbaf-a44b6350314d-kube-api-access-t5mqf\") pod \"redhat-operators-hxg9k\" (UID: \"ea398908-abb3-4ce8-bbaf-a44b6350314d\") " pod="openshift-marketplace/redhat-operators-hxg9k"
Jan 26 16:57:17 crc kubenswrapper[4865]: E0126 16:57:17.915695 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:18.415682182 +0000 UTC m=+165.999567759 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.016483 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.016561 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea398908-abb3-4ce8-bbaf-a44b6350314d-utilities\") pod \"redhat-operators-hxg9k\" (UID: \"ea398908-abb3-4ce8-bbaf-a44b6350314d\") " pod="openshift-marketplace/redhat-operators-hxg9k"
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.016591 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea398908-abb3-4ce8-bbaf-a44b6350314d-catalog-content\") pod \"redhat-operators-hxg9k\" (UID: \"ea398908-abb3-4ce8-bbaf-a44b6350314d\") " pod="openshift-marketplace/redhat-operators-hxg9k"
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.016640 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t5mqf\" (UniqueName: \"kubernetes.io/projected/ea398908-abb3-4ce8-bbaf-a44b6350314d-kube-api-access-t5mqf\") pod \"redhat-operators-hxg9k\" (UID: \"ea398908-abb3-4ce8-bbaf-a44b6350314d\") " pod="openshift-marketplace/redhat-operators-hxg9k"
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.017636 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea398908-abb3-4ce8-bbaf-a44b6350314d-utilities\") pod \"redhat-operators-hxg9k\" (UID: \"ea398908-abb3-4ce8-bbaf-a44b6350314d\") " pod="openshift-marketplace/redhat-operators-hxg9k"
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.017835 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea398908-abb3-4ce8-bbaf-a44b6350314d-catalog-content\") pod \"redhat-operators-hxg9k\" (UID: \"ea398908-abb3-4ce8-bbaf-a44b6350314d\") " pod="openshift-marketplace/redhat-operators-hxg9k"
Jan 26 16:57:18 crc kubenswrapper[4865]: E0126 16:57:18.018275 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:18.51811433 +0000 UTC m=+166.101999917 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.069431 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t5mqf\" (UniqueName: \"kubernetes.io/projected/ea398908-abb3-4ce8-bbaf-a44b6350314d-kube-api-access-t5mqf\") pod \"redhat-operators-hxg9k\" (UID: \"ea398908-abb3-4ce8-bbaf-a44b6350314d\") " pod="openshift-marketplace/redhat-operators-hxg9k"
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.119216 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:18 crc kubenswrapper[4865]: E0126 16:57:18.119840 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 16:57:18.619812448 +0000 UTC m=+166.203698035 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.179440 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-pg69s"]
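From 16:57:16.904 to 16:57:18.619 the same two error signatures (MountDevice against a mounting pod, TearDownAt against a terminating pod) recur roughly every 500ms; only the timestamps differ. When triaging an excerpt like this, collapsing the repeats makes the eventual state change easier to spot. A self-contained convenience sketch, not part of kubelet, that counts the repeats from a saved log on stdin:

    package main

    import (
    	"bufio"
    	"fmt"
    	"os"
    	"regexp"
    )

    // Matches the recurring CSI failure in this excerpt.
    var errRe = regexp.MustCompile(`driver name (\S+) not found in the list of registered CSI drivers`)

    func main() {
    	counts := map[string]int{}
    	sc := bufio.NewScanner(os.Stdin)
    	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // kubelet lines can be long
    	for sc.Scan() {
    		if m := errRe.FindStringSubmatch(sc.Text()); m != nil {
    			counts[m[1]]++
    		}
    	}
    	for driver, n := range counts {
    		fmt.Printf("%d 'not registered' errors for driver %s\n", n, driver)
    	}
    }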
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.187097 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pg69s"
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.190606 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-gw6ld"]
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.190714 4865 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2026-01-26T16:57:17.212420245Z","Handler":null,"Name":""}
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.207418 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7x4jl"]
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.221284 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:18 crc kubenswrapper[4865]: E0126 16:57:18.221614 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 16:57:18.721602619 +0000 UTC m=+166.305488206 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-gfbfj" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.227420 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pg69s"]
Jan 26 16:57:18 crc kubenswrapper[4865]: W0126 16:57:18.237648 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod28d973df_adc6_41b0_81b9_afd3a743641f.slice/crio-3e9d3cd15811ee0ec5c96ee9118d1714ce3c7154d8f277f81eeac9d7c0ca421c WatchSource:0}: Error finding container 3e9d3cd15811ee0ec5c96ee9118d1714ce3c7154d8f277f81eeac9d7c0ca421c: Status 404 returned error can't find the container with id 3e9d3cd15811ee0ec5c96ee9118d1714ce3c7154d8f277f81eeac9d7c0ca421c
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.253461 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hxg9k"
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.260362 4865 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.260411 4865 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.278804 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6vslt"]
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.311273 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-d5vtl"]
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.315633 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-r9jxm"]
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.323906 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.324197 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rchs4\" (UniqueName: \"kubernetes.io/projected/e402709d-4ea5-403d-af16-3ab178841b35-kube-api-access-rchs4\") pod \"redhat-operators-pg69s\" (UID: \"e402709d-4ea5-403d-af16-3ab178841b35\") " pod="openshift-marketplace/redhat-operators-pg69s"
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.324239 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e402709d-4ea5-403d-af16-3ab178841b35-catalog-content\") pod \"redhat-operators-pg69s\" (UID: \"e402709d-4ea5-403d-af16-3ab178841b35\") " pod="openshift-marketplace/redhat-operators-pg69s"
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.324326 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e402709d-4ea5-403d-af16-3ab178841b35-utilities\") pod \"redhat-operators-pg69s\" (UID: \"e402709d-4ea5-403d-af16-3ab178841b35\") " pod="openshift-marketplace/redhat-operators-pg69s"
Jan 26 16:57:18 crc kubenswrapper[4865]: W0126 16:57:18.325171 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod203f4245_9105_442f_a78b_dc354926516b.slice/crio-79ff78f6a9302f566f01c172407c2668e21ae1c10e1c6c064a11ba6922703273 WatchSource:0}: Error finding container 79ff78f6a9302f566f01c172407c2668e21ae1c10e1c6c064a11ba6922703273: Status 404 returned error can't find the container with id 79ff78f6a9302f566f01c172407c2668e21ae1c10e1c6c064a11ba6922703273
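The three registration lines above (plugin_watcher at 16:57:17.212379, RegisterPlugin at 16:57:18.190714, csi_plugin validate/register at 16:57:18.260362) record the handshake that finally unblocks the volume: the driver drops a registration socket into /var/lib/kubelet/plugins_registry/, kubelet dials it to ask for the driver's name, endpoint, and supported versions (note Name is still "" in the RegisterPlugin line, because it is learned from that call), then validates and adds the driver to its registry. A condensed sketch of the final step; the types are illustrative stand-ins for the pluginregistration gRPC API, not kubelet's real ones:

    package main

    import "fmt"

    // PluginInfo mirrors what the registration socket returns
    // (assumption: field set reduced to what the log shows).
    type PluginInfo struct {
    	Type              string // "CSIPlugin"
    	Name              string // "kubevirt.io.hostpath-provisioner"
    	Endpoint          string // "/var/lib/kubelet/plugins/csi-hostpath/csi.sock"
    	SupportedVersions []string
    }

    func validateAndRegister(info PluginInfo, registered map[string]string) error {
    	if info.Name == "" || info.Endpoint == "" {
    		return fmt.Errorf("plugin did not advertise a name and endpoint")
    	}
    	// Until this map is populated, every MountDevice/TearDownAt above
    	// fails with "not found in the list of registered CSI drivers".
    	registered[info.Name] = info.Endpoint
    	return nil
    }

    func main() {
    	drivers := map[string]string{}
    	info := PluginInfo{
    		Type:              "CSIPlugin",
    		Name:              "kubevirt.io.hostpath-provisioner",
    		Endpoint:          "/var/lib/kubelet/plugins/csi-hostpath/csi.sock",
    		SupportedVersions: []string{"1.0.0"},
    	}
    	if err := validateAndRegister(info, drivers); err != nil {
    		panic(err)
    	}
    	fmt.Println("registered:", drivers)
    }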
Jan 26 16:57:18 crc kubenswrapper[4865]: W0126 16:57:18.357186 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod109ec94e_2b65_4cc6_a53c_3f874401fd5e.slice/crio-44c2cb7ac7262cd4ea685ef9e99a08e559573a39b02237f50af5a406aa759889 WatchSource:0}: Error finding container 44c2cb7ac7262cd4ea685ef9e99a08e559573a39b02237f50af5a406aa759889: Status 404 returned error can't find the container with id 44c2cb7ac7262cd4ea685ef9e99a08e559573a39b02237f50af5a406aa759889
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.357864 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue ""
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.420099 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes"
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.433159 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e402709d-4ea5-403d-af16-3ab178841b35-utilities\") pod \"redhat-operators-pg69s\" (UID: \"e402709d-4ea5-403d-af16-3ab178841b35\") " pod="openshift-marketplace/redhat-operators-pg69s"
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.433690 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rchs4\" (UniqueName: \"kubernetes.io/projected/e402709d-4ea5-403d-af16-3ab178841b35-kube-api-access-rchs4\") pod \"redhat-operators-pg69s\" (UID: \"e402709d-4ea5-403d-af16-3ab178841b35\") " pod="openshift-marketplace/redhat-operators-pg69s"
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.433815 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e402709d-4ea5-403d-af16-3ab178841b35-catalog-content\") pod \"redhat-operators-pg69s\" (UID: \"e402709d-4ea5-403d-af16-3ab178841b35\") " pod="openshift-marketplace/redhat-operators-pg69s"
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.433967 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.434353 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e402709d-4ea5-403d-af16-3ab178841b35-utilities\") pod \"redhat-operators-pg69s\" (UID: \"e402709d-4ea5-403d-af16-3ab178841b35\") " pod="openshift-marketplace/redhat-operators-pg69s"
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.439640 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e402709d-4ea5-403d-af16-3ab178841b35-catalog-content\") pod \"redhat-operators-pg69s\" (UID: \"e402709d-4ea5-403d-af16-3ab178841b35\") " pod="openshift-marketplace/redhat-operators-pg69s"
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.442147 4865 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.442191 4865 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.448583 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.468772 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6vslt" event={"ID":"203f4245-9105-442f-a78b-dc354926516b","Type":"ContainerStarted","Data":"79ff78f6a9302f566f01c172407c2668e21ae1c10e1c6c064a11ba6922703273"}
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.486684 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rchs4\" (UniqueName: \"kubernetes.io/projected/e402709d-4ea5-403d-af16-3ab178841b35-kube-api-access-rchs4\") pod \"redhat-operators-pg69s\" (UID: \"e402709d-4ea5-403d-af16-3ab178841b35\") " pod="openshift-marketplace/redhat-operators-pg69s"
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.487796 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r9jxm" event={"ID":"7337688e-9bf5-436c-be77-afad7fa093ed","Type":"ContainerStarted","Data":"b66624227081c3403e448e8cff50fd13055d9f88112599cdc725a4db0d6323cb"}
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.489016 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gw6ld" event={"ID":"1fa30c32-b5b9-48db-baf7-761da95213f7","Type":"ContainerStarted","Data":"449e124fb15ade49d894be02feff87e8408a0b914228f6505ef2242b307a6054"}
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.492495 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-wx8d6" event={"ID":"e8e3ebba-3ac5-4cee-8fa8-273a1f25eaf2","Type":"ContainerStarted","Data":"ba5793a32087f7f09a03490fdb79921e355889d4260f663307555c2b5056ac50"}
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.492530 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-wx8d6" event={"ID":"e8e3ebba-3ac5-4cee-8fa8-273a1f25eaf2","Type":"ContainerStarted","Data":"ec028b4b6efd0ef30ecc91e1696c00d6ca80023df67633b21b78411c276afbb5"}
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.494956 4865 generic.go:334] "Generic (PLEG): container finished" podID="ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf" containerID="4255e619441f82281bf484b7eaf5f429412ee79230e309dda58d4113c9c084e1" exitCode=0
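The csi_attacher lines above explain why the image-registry volume finally mounts: once registered, the hostpath driver reports that it does not support a separate node-staging step, so kubelet skips NodeStageVolume ("Skipping MountDevice...") and lets the per-pod NodePublishVolume (MountVolume.SetUp) do all the work. A condensed sketch of that decision; the capability name follows the CSI spec, but the helper and types are illustrative, not kubelet's csi_attacher API:

    package main

    import "fmt"

    type nodeCapability string

    // CSI spec capability name, as printed in the log line above.
    const capStageUnstage nodeCapability = "STAGE_UNSTAGE_VOLUME"

    // mountDevice sketches the branch in the device-mount path: staging
    // only happens if the driver advertises STAGE_UNSTAGE_VOLUME.
    func mountDevice(caps map[nodeCapability]bool, deviceMountPath string) {
    	if !caps[capStageUnstage] {
    		fmt.Println("STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...")
    		return // NodePublishVolume (MountVolume.SetUp) will do the mount
    	}
    	fmt.Println("NodeStageVolume ->", deviceMountPath)
    }

    func main() {
    	caps := map[nodeCapability]bool{} // the hostpath driver advertises no staging
    	mountDevice(caps, "/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/.../globalmount")
    }

Even without staging, the operation is logged as "MountVolume.MountDevice succeeded" with the global mount path, which is why both lines appear back to back.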
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.495033 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fkmhm" event={"ID":"ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf","Type":"ContainerDied","Data":"4255e619441f82281bf484b7eaf5f429412ee79230e309dda58d4113c9c084e1"}
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.495052 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fkmhm" event={"ID":"ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf","Type":"ContainerStarted","Data":"b77351e4d4d08b7aa8f664a2981356f5e5876ab56a06be20d0b5541d20d177b3"}
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.512972 4865 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.624818 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pg69s"
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.635660 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-gfbfj\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.652333 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7x4jl" event={"ID":"28d973df-adc6-41b0-81b9-afd3a743641f","Type":"ContainerStarted","Data":"3e9d3cd15811ee0ec5c96ee9118d1714ce3c7154d8f277f81eeac9d7c0ca421c"}
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.666379 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d5vtl" event={"ID":"109ec94e-2b65-4cc6-a53c-3f874401fd5e","Type":"ContainerStarted","Data":"44c2cb7ac7262cd4ea685ef9e99a08e559573a39b02237f50af5a406aa759889"}
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.680631 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-wx8d6" podStartSLOduration=17.680590327 podStartE2EDuration="17.680590327s" podCreationTimestamp="2026-01-26 16:57:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:57:18.629893593 +0000 UTC m=+166.213779180" watchObservedRunningTime="2026-01-26 16:57:18.680590327 +0000 UTC m=+166.264475934"
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.701534 4865 patch_prober.go:28] interesting pod/router-default-5444994796-gx9t9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 26 16:57:18 crc kubenswrapper[4865]: [-]has-synced failed: reason withheld
Jan 26 16:57:18 crc kubenswrapper[4865]: [+]process-running ok
Jan 26 16:57:18 crc kubenswrapper[4865]: healthz check failed
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.702244 4865 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gx9t9" podUID="a5eb1e8d-ffa8-4422-aa5d-852074436139" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 26 16:57:18 crc kubenswrapper[4865]: I0126 16:57:18.772353 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:19 crc kubenswrapper[4865]: I0126 16:57:19.014981 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hxg9k"]
Jan 26 16:57:19 crc kubenswrapper[4865]: I0126 16:57:19.496398 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-6lhmn"
Jan 26 16:57:19 crc kubenswrapper[4865]: I0126 16:57:19.496935 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-6lhmn"
Jan 26 16:57:19 crc kubenswrapper[4865]: I0126 16:57:19.581438 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-gfbfj"]
Jan 26 16:57:19 crc kubenswrapper[4865]: I0126 16:57:19.678969 4865 patch_prober.go:28] interesting pod/router-default-5444994796-gx9t9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 26 16:57:19 crc kubenswrapper[4865]: [-]has-synced failed: reason withheld
Jan 26 16:57:19 crc kubenswrapper[4865]: [+]process-running ok
Jan 26 16:57:19 crc kubenswrapper[4865]: healthz check failed
Jan 26 16:57:19 crc kubenswrapper[4865]: I0126 16:57:19.680464 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" event={"ID":"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625","Type":"ContainerStarted","Data":"c394ed494db06392ea6567794819c06635cbea9016883a5bbd59c0b15dd95d55"}
Jan 26 16:57:19 crc kubenswrapper[4865]: I0126 16:57:19.680945 4865 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gx9t9" podUID="a5eb1e8d-ffa8-4422-aa5d-852074436139" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 26 16:57:19 crc kubenswrapper[4865]: I0126 16:57:19.682174 4865 generic.go:334] "Generic (PLEG): container finished" podID="203f4245-9105-442f-a78b-dc354926516b" containerID="f9ebd85955f9ca9c17a6a17b309772381a8976daa567d9337af20b8b0d984cf6" exitCode=0
Jan 26 16:57:19 crc kubenswrapper[4865]: I0126 16:57:19.682227 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6vslt" event={"ID":"203f4245-9105-442f-a78b-dc354926516b","Type":"ContainerDied","Data":"f9ebd85955f9ca9c17a6a17b309772381a8976daa567d9337af20b8b0d984cf6"}
Jan 26 16:57:19 crc kubenswrapper[4865]: I0126 16:57:19.687948 4865 generic.go:334] "Generic (PLEG): container finished" podID="7337688e-9bf5-436c-be77-afad7fa093ed" containerID="199a7f344e605cfba54f04202ad8536a401034b3240f7fd1a01fdb5c29d5073c" exitCode=0
Jan 26 16:57:19 crc kubenswrapper[4865]: I0126 16:57:19.688016 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r9jxm" event={"ID":"7337688e-9bf5-436c-be77-afad7fa093ed","Type":"ContainerDied","Data":"199a7f344e605cfba54f04202ad8536a401034b3240f7fd1a01fdb5c29d5073c"}
Jan 26 16:57:19 crc kubenswrapper[4865]: I0126 16:57:19.702388 4865 generic.go:334] "Generic (PLEG): container finished" podID="1fa30c32-b5b9-48db-baf7-761da95213f7" containerID="3264020642a3593da10e8e9650cc7daa51e0cef67a76db5c21c5b394de83b752" exitCode=0
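The "SyncLoop (PLEG): event for pod" entries serialize each pod lifecycle event as event={"ID":...,"Type":...,"Data":...}, where ID is the pod UID and Data is the container or sandbox ID; the ContainerDied events with exitCode=0 around them are the marketplace pods' extract-content steps finishing normally. When post-processing a log like this one, the payload is plain JSON and can be decoded with an equivalent struct (field names taken from the log text itself; the struct is a stand-in, not kubelet's type):

    package main

    import (
    	"encoding/json"
    	"fmt"
    )

    type plegEvent struct {
    	ID   string `json:"ID"`   // pod UID
    	Type string `json:"Type"` // ContainerStarted, ContainerDied, ...
    	Data string `json:"Data"` // container or sandbox ID
    }

    func main() {
    	// Payload copied from the image-registry event above.
    	raw := `{"ID":"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625","Type":"ContainerStarted","Data":"c394ed494db06392ea6567794819c06635cbea9016883a5bbd59c0b15dd95d55"}`
    	var ev plegEvent
    	if err := json.Unmarshal([]byte(raw), &ev); err != nil {
    		panic(err)
    	}
    	fmt.Printf("pod %s: %s %s...\n", ev.ID, ev.Type, ev.Data[:12])
    }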
Jan 26 16:57:19 crc kubenswrapper[4865]: I0126 16:57:19.702487 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gw6ld" event={"ID":"1fa30c32-b5b9-48db-baf7-761da95213f7","Type":"ContainerDied","Data":"3264020642a3593da10e8e9650cc7daa51e0cef67a76db5c21c5b394de83b752"}
Jan 26 16:57:19 crc kubenswrapper[4865]: I0126 16:57:19.705652 4865 generic.go:334] "Generic (PLEG): container finished" podID="28d973df-adc6-41b0-81b9-afd3a743641f" containerID="4dc0a3a6e67aa86fb23ba40815915da6aa28dd41101e79d40808ad70298c1b0a" exitCode=0
Jan 26 16:57:19 crc kubenswrapper[4865]: I0126 16:57:19.705702 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7x4jl" event={"ID":"28d973df-adc6-41b0-81b9-afd3a743641f","Type":"ContainerDied","Data":"4dc0a3a6e67aa86fb23ba40815915da6aa28dd41101e79d40808ad70298c1b0a"}
Jan 26 16:57:19 crc kubenswrapper[4865]: I0126 16:57:19.711847 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hxg9k" event={"ID":"ea398908-abb3-4ce8-bbaf-a44b6350314d","Type":"ContainerStarted","Data":"4992a2592c64dad3a19efb08f8edcb038ec0da078e820f28a1c80d98f7397a15"}
Jan 26 16:57:19 crc kubenswrapper[4865]: I0126 16:57:19.713249 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"4184b2be-7b37-40a8-b2d0-9b91d5daf5a8","Type":"ContainerStarted","Data":"fc4a2265a07f1955b822315b301c3e5aeaeb43ea8ec22f7e9500558d5643ef72"}
Jan 26 16:57:19 crc kubenswrapper[4865]: I0126 16:57:19.715098 4865 generic.go:334] "Generic (PLEG): container finished" podID="7c335727-ad5b-4e67-ab8d-a97e0d20d310" containerID="e09a08331abd7a20f214b45ff4a018e880b189be01b32d129b63874c22afa836" exitCode=0
Jan 26 16:57:19 crc kubenswrapper[4865]: I0126 16:57:19.715147 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"7c335727-ad5b-4e67-ab8d-a97e0d20d310","Type":"ContainerDied","Data":"e09a08331abd7a20f214b45ff4a018e880b189be01b32d129b63874c22afa836"}
Jan 26 16:57:19 crc kubenswrapper[4865]: I0126 16:57:19.717347 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d5vtl" event={"ID":"109ec94e-2b65-4cc6-a53c-3f874401fd5e","Type":"ContainerStarted","Data":"dcdd8eefb507efdbe1e407f939c28a25389d558d748ce86a1a08840d33c5d563"}
Jan 26 16:57:19 crc kubenswrapper[4865]: I0126 16:57:19.837360 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pg69s"]
Jan 26 16:57:20 crc kubenswrapper[4865]: I0126 16:57:20.064201 4865 patch_prober.go:28] interesting pod/apiserver-76f77b778f-6lhmn container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok
Jan 26 16:57:20 crc kubenswrapper[4865]: [+]log ok
Jan 26 16:57:20 crc kubenswrapper[4865]: [+]etcd ok
Jan 26 16:57:20 crc kubenswrapper[4865]: [+]poststarthook/start-apiserver-admission-initializer ok
Jan 26 16:57:20 crc kubenswrapper[4865]: [+]poststarthook/generic-apiserver-start-informers ok
Jan 26 16:57:20 crc kubenswrapper[4865]: [+]poststarthook/max-in-flight-filter ok
Jan 26 16:57:20 crc kubenswrapper[4865]: [+]poststarthook/storage-object-count-tracker-hook ok
Jan 26 16:57:20 crc kubenswrapper[4865]: [+]poststarthook/image.openshift.io-apiserver-caches ok
Jan 26 16:57:20 crc kubenswrapper[4865]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld
Jan 26 16:57:20 crc kubenswrapper[4865]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld
Jan 26 16:57:20 crc kubenswrapper[4865]: [+]poststarthook/project.openshift.io-projectcache ok
Jan 26 16:57:20 crc kubenswrapper[4865]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok
Jan 26 16:57:20 crc kubenswrapper[4865]: [-]poststarthook/openshift.io-startinformers failed: reason withheld
Jan 26 16:57:20 crc kubenswrapper[4865]: [+]poststarthook/openshift.io-restmapperupdater ok
Jan 26 16:57:20 crc kubenswrapper[4865]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok
Jan 26 16:57:20 crc kubenswrapper[4865]: livez check failed
Jan 26 16:57:20 crc kubenswrapper[4865]: I0126 16:57:20.064754 4865 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-6lhmn" podUID="d6c003d6-66db-4a16-86a2-ba0d03cf1a25" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 26 16:57:20 crc kubenswrapper[4865]: I0126 16:57:20.685691 4865 patch_prober.go:28] interesting pod/router-default-5444994796-gx9t9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Jan 26 16:57:20 crc kubenswrapper[4865]: [-]has-synced failed: reason withheld
Jan 26 16:57:20 crc kubenswrapper[4865]: [+]process-running ok
Jan 26 16:57:20 crc kubenswrapper[4865]: healthz check failed
Jan 26 16:57:20 crc kubenswrapper[4865]: I0126 16:57:20.685780 4865 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gx9t9" podUID="a5eb1e8d-ffa8-4422-aa5d-852074436139" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Jan 26 16:57:20 crc kubenswrapper[4865]: I0126 16:57:20.896076 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" event={"ID":"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625","Type":"ContainerStarted","Data":"d3cfa5e14f941ec45b4d6bbaec8032621018739ee882910197b779b79676deb3"}
Jan 26 16:57:20 crc kubenswrapper[4865]: I0126 16:57:20.897318 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj"
Jan 26 16:57:20 crc kubenswrapper[4865]: I0126 16:57:20.910085 4865 generic.go:334] "Generic (PLEG): container finished" podID="ea398908-abb3-4ce8-bbaf-a44b6350314d" containerID="3a6e1f3ba6ae2b3ef3d4cf5aef1b551da2777b6c2b579d6c7bfe48ae5527d647" exitCode=0
Jan 26 16:57:20 crc kubenswrapper[4865]: I0126 16:57:20.910242 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hxg9k" event={"ID":"ea398908-abb3-4ce8-bbaf-a44b6350314d","Type":"ContainerDied","Data":"3a6e1f3ba6ae2b3ef3d4cf5aef1b551da2777b6c2b579d6c7bfe48ae5527d647"}
Jan 26 16:57:20 crc kubenswrapper[4865]: I0126 16:57:20.932751 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"4184b2be-7b37-40a8-b2d0-9b91d5daf5a8","Type":"ContainerStarted","Data":"ff02e541b2830afc275c2a473530a6caeac4c5fc0a422ae132c7a8bbfab920d8"}
Jan 26 16:57:20 crc kubenswrapper[4865]: I0126 16:57:20.939099 4865 generic.go:334] "Generic (PLEG): container finished" podID="109ec94e-2b65-4cc6-a53c-3f874401fd5e" containerID="dcdd8eefb507efdbe1e407f939c28a25389d558d748ce86a1a08840d33c5d563" exitCode=0
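The apiserver startup-probe output above is the standard healthz report format: one "[+]name ok" or "[-]name failed: reason withheld" line per registered check, followed by an overall verdict ("livez check failed"). Here three OpenShift poststarthooks have not completed yet, which is enough to fail the whole probe. A quick way to pull out just the failing checks when scanning a log like this (a small parsing sketch, not an existing API):

    package main

    import (
    	"fmt"
    	"strings"
    )

    // failingChecks returns the names of all "[-]..." lines in a healthz body.
    func failingChecks(body string) []string {
    	var failed []string
    	for _, line := range strings.Split(body, "\n") {
    		line = strings.TrimSpace(line)
    		if strings.HasPrefix(line, "[-]") {
    			name := strings.TrimPrefix(line, "[-]")
    			failed = append(failed, strings.SplitN(name, " ", 2)[0])
    		}
    	}
    	return failed
    }

    func main() {
    	body := "[+]ping ok\n[-]poststarthook/openshift.io-startinformers failed: reason withheld\nlivez check failed"
    	fmt.Println(failingChecks(body)) // [poststarthook/openshift.io-startinformers]
    }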
4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d5vtl" event={"ID":"109ec94e-2b65-4cc6-a53c-3f874401fd5e","Type":"ContainerDied","Data":"dcdd8eefb507efdbe1e407f939c28a25389d558d748ce86a1a08840d33c5d563"} Jan 26 16:57:20 crc kubenswrapper[4865]: I0126 16:57:20.942074 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pg69s" event={"ID":"e402709d-4ea5-403d-af16-3ab178841b35","Type":"ContainerStarted","Data":"506dc6f0514fc4a08d184451ec692c5c72ce381e34c1ff3b27e3b7afcfcba658"} Jan 26 16:57:20 crc kubenswrapper[4865]: I0126 16:57:20.942105 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pg69s" event={"ID":"e402709d-4ea5-403d-af16-3ab178841b35","Type":"ContainerStarted","Data":"6590cef2a7f1cb1544986c097b1e906db70623745c1fa3226f89378c47bc3c18"} Jan 26 16:57:20 crc kubenswrapper[4865]: I0126 16:57:20.958483 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" podStartSLOduration=136.95845765 podStartE2EDuration="2m16.95845765s" podCreationTimestamp="2026-01-26 16:55:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:57:20.950658067 +0000 UTC m=+168.534543654" watchObservedRunningTime="2026-01-26 16:57:20.95845765 +0000 UTC m=+168.542343247" Jan 26 16:57:21 crc kubenswrapper[4865]: I0126 16:57:21.150781 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=4.150740147 podStartE2EDuration="4.150740147s" podCreationTimestamp="2026-01-26 16:57:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:57:21.13896925 +0000 UTC m=+168.722854837" watchObservedRunningTime="2026-01-26 16:57:21.150740147 +0000 UTC m=+168.734625734" Jan 26 16:57:21 crc kubenswrapper[4865]: I0126 16:57:21.678287 4865 patch_prober.go:28] interesting pod/router-default-5444994796-gx9t9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 26 16:57:21 crc kubenswrapper[4865]: [-]has-synced failed: reason withheld Jan 26 16:57:21 crc kubenswrapper[4865]: [+]process-running ok Jan 26 16:57:21 crc kubenswrapper[4865]: healthz check failed Jan 26 16:57:21 crc kubenswrapper[4865]: I0126 16:57:21.678353 4865 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gx9t9" podUID="a5eb1e8d-ffa8-4422-aa5d-852074436139" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 26 16:57:21 crc kubenswrapper[4865]: I0126 16:57:21.908450 4865 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 26 16:57:21 crc kubenswrapper[4865]: I0126 16:57:21.965962 4865 generic.go:334] "Generic (PLEG): container finished" podID="4184b2be-7b37-40a8-b2d0-9b91d5daf5a8" containerID="ff02e541b2830afc275c2a473530a6caeac4c5fc0a422ae132c7a8bbfab920d8" exitCode=0 Jan 26 16:57:21 crc kubenswrapper[4865]: I0126 16:57:21.966078 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"4184b2be-7b37-40a8-b2d0-9b91d5daf5a8","Type":"ContainerDied","Data":"ff02e541b2830afc275c2a473530a6caeac4c5fc0a422ae132c7a8bbfab920d8"} Jan 26 16:57:21 crc kubenswrapper[4865]: I0126 16:57:21.971125 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 26 16:57:21 crc kubenswrapper[4865]: I0126 16:57:21.971177 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"7c335727-ad5b-4e67-ab8d-a97e0d20d310","Type":"ContainerDied","Data":"c8e3747a5257ba102eee4a75f4fd8323a2eb8dac7c8dd14f49b0283f3e8fc9fe"} Jan 26 16:57:21 crc kubenswrapper[4865]: I0126 16:57:21.971266 4865 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c8e3747a5257ba102eee4a75f4fd8323a2eb8dac7c8dd14f49b0283f3e8fc9fe" Jan 26 16:57:22 crc kubenswrapper[4865]: I0126 16:57:22.014238 4865 generic.go:334] "Generic (PLEG): container finished" podID="e402709d-4ea5-403d-af16-3ab178841b35" containerID="506dc6f0514fc4a08d184451ec692c5c72ce381e34c1ff3b27e3b7afcfcba658" exitCode=0 Jan 26 16:57:22 crc kubenswrapper[4865]: I0126 16:57:22.015285 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pg69s" event={"ID":"e402709d-4ea5-403d-af16-3ab178841b35","Type":"ContainerDied","Data":"506dc6f0514fc4a08d184451ec692c5c72ce381e34c1ff3b27e3b7afcfcba658"} Jan 26 16:57:22 crc kubenswrapper[4865]: I0126 16:57:22.082571 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7c335727-ad5b-4e67-ab8d-a97e0d20d310-kubelet-dir\") pod \"7c335727-ad5b-4e67-ab8d-a97e0d20d310\" (UID: \"7c335727-ad5b-4e67-ab8d-a97e0d20d310\") " Jan 26 16:57:22 crc kubenswrapper[4865]: I0126 16:57:22.082751 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7c335727-ad5b-4e67-ab8d-a97e0d20d310-kube-api-access\") pod \"7c335727-ad5b-4e67-ab8d-a97e0d20d310\" (UID: \"7c335727-ad5b-4e67-ab8d-a97e0d20d310\") " Jan 26 16:57:22 crc kubenswrapper[4865]: I0126 16:57:22.084191 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7c335727-ad5b-4e67-ab8d-a97e0d20d310-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "7c335727-ad5b-4e67-ab8d-a97e0d20d310" (UID: "7c335727-ad5b-4e67-ab8d-a97e0d20d310"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 16:57:22 crc kubenswrapper[4865]: I0126 16:57:22.147010 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c335727-ad5b-4e67-ab8d-a97e0d20d310-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "7c335727-ad5b-4e67-ab8d-a97e0d20d310" (UID: "7c335727-ad5b-4e67-ab8d-a97e0d20d310"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:57:22 crc kubenswrapper[4865]: I0126 16:57:22.186534 4865 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7c335727-ad5b-4e67-ab8d-a97e0d20d310-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 26 16:57:22 crc kubenswrapper[4865]: I0126 16:57:22.186568 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7c335727-ad5b-4e67-ab8d-a97e0d20d310-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 26 16:57:22 crc kubenswrapper[4865]: I0126 16:57:22.705242 4865 patch_prober.go:28] interesting pod/router-default-5444994796-gx9t9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 26 16:57:22 crc kubenswrapper[4865]: [-]has-synced failed: reason withheld Jan 26 16:57:22 crc kubenswrapper[4865]: [+]process-running ok Jan 26 16:57:22 crc kubenswrapper[4865]: healthz check failed Jan 26 16:57:22 crc kubenswrapper[4865]: I0126 16:57:22.705298 4865 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gx9t9" podUID="a5eb1e8d-ffa8-4422-aa5d-852074436139" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 26 16:57:22 crc kubenswrapper[4865]: I0126 16:57:22.850240 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-gtdlb" Jan 26 16:57:23 crc kubenswrapper[4865]: I0126 16:57:23.031005 4865 generic.go:334] "Generic (PLEG): container finished" podID="3862707b-bcae-4ad0-b309-fa075dc82f70" containerID="9e4f3695228595d6e9784f1242760500f9692c79b1b471531b361b48a69482c9" exitCode=0 Jan 26 16:57:23 crc kubenswrapper[4865]: I0126 16:57:23.031042 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29490765-lx6b6" event={"ID":"3862707b-bcae-4ad0-b309-fa075dc82f70","Type":"ContainerDied","Data":"9e4f3695228595d6e9784f1242760500f9692c79b1b471531b361b48a69482c9"} Jan 26 16:57:23 crc kubenswrapper[4865]: I0126 16:57:23.678082 4865 patch_prober.go:28] interesting pod/router-default-5444994796-gx9t9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 26 16:57:23 crc kubenswrapper[4865]: [-]has-synced failed: reason withheld Jan 26 16:57:23 crc kubenswrapper[4865]: [+]process-running ok Jan 26 16:57:23 crc kubenswrapper[4865]: healthz check failed Jan 26 16:57:23 crc kubenswrapper[4865]: I0126 16:57:23.678181 4865 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gx9t9" podUID="a5eb1e8d-ffa8-4422-aa5d-852074436139" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 26 16:57:23 crc kubenswrapper[4865]: I0126 16:57:23.841641 4865 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 26 16:57:24 crc kubenswrapper[4865]: I0126 16:57:24.033760 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4184b2be-7b37-40a8-b2d0-9b91d5daf5a8-kubelet-dir\") pod \"4184b2be-7b37-40a8-b2d0-9b91d5daf5a8\" (UID: \"4184b2be-7b37-40a8-b2d0-9b91d5daf5a8\") " Jan 26 16:57:24 crc kubenswrapper[4865]: I0126 16:57:24.033846 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4184b2be-7b37-40a8-b2d0-9b91d5daf5a8-kube-api-access\") pod \"4184b2be-7b37-40a8-b2d0-9b91d5daf5a8\" (UID: \"4184b2be-7b37-40a8-b2d0-9b91d5daf5a8\") " Jan 26 16:57:24 crc kubenswrapper[4865]: I0126 16:57:24.035684 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4184b2be-7b37-40a8-b2d0-9b91d5daf5a8-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "4184b2be-7b37-40a8-b2d0-9b91d5daf5a8" (UID: "4184b2be-7b37-40a8-b2d0-9b91d5daf5a8"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 16:57:24 crc kubenswrapper[4865]: I0126 16:57:24.050630 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"4184b2be-7b37-40a8-b2d0-9b91d5daf5a8","Type":"ContainerDied","Data":"fc4a2265a07f1955b822315b301c3e5aeaeb43ea8ec22f7e9500558d5643ef72"} Jan 26 16:57:24 crc kubenswrapper[4865]: I0126 16:57:24.050701 4865 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fc4a2265a07f1955b822315b301c3e5aeaeb43ea8ec22f7e9500558d5643ef72" Jan 26 16:57:24 crc kubenswrapper[4865]: I0126 16:57:24.050659 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 26 16:57:24 crc kubenswrapper[4865]: I0126 16:57:24.133711 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-l6hpd" Jan 26 16:57:24 crc kubenswrapper[4865]: I0126 16:57:24.136238 4865 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4184b2be-7b37-40a8-b2d0-9b91d5daf5a8-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 26 16:57:24 crc kubenswrapper[4865]: I0126 16:57:24.142464 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4184b2be-7b37-40a8-b2d0-9b91d5daf5a8-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "4184b2be-7b37-40a8-b2d0-9b91d5daf5a8" (UID: "4184b2be-7b37-40a8-b2d0-9b91d5daf5a8"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:57:24 crc kubenswrapper[4865]: I0126 16:57:24.238134 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4184b2be-7b37-40a8-b2d0-9b91d5daf5a8-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 26 16:57:24 crc kubenswrapper[4865]: I0126 16:57:24.263571 4865 patch_prober.go:28] interesting pod/downloads-7954f5f757-7pntj container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Jan 26 16:57:24 crc kubenswrapper[4865]: I0126 16:57:24.263631 4865 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7pntj" podUID="9156ce7a-0206-4509-b090-d2e93c83f425" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Jan 26 16:57:24 crc kubenswrapper[4865]: I0126 16:57:24.265059 4865 patch_prober.go:28] interesting pod/downloads-7954f5f757-7pntj container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Jan 26 16:57:24 crc kubenswrapper[4865]: I0126 16:57:24.265155 4865 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-7pntj" podUID="9156ce7a-0206-4509-b090-d2e93c83f425" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Jan 26 16:57:24 crc kubenswrapper[4865]: I0126 16:57:24.313788 4865 patch_prober.go:28] interesting pod/console-f9d7485db-dsngx container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.16:8443/health\": dial tcp 10.217.0.16:8443: connect: connection refused" start-of-body= Jan 26 16:57:24 crc kubenswrapper[4865]: I0126 16:57:24.313856 4865 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-dsngx" podUID="f8fac562-f66c-433b-9e4a-1a08fe6d78f5" containerName="console" probeResult="failure" output="Get \"https://10.217.0.16:8443/health\": dial tcp 10.217.0.16:8443: connect: connection refused" Jan 26 16:57:24 crc kubenswrapper[4865]: I0126 16:57:24.511769 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-6lhmn" Jan 26 16:57:24 crc kubenswrapper[4865]: I0126 16:57:24.523865 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-6lhmn" Jan 26 16:57:24 crc kubenswrapper[4865]: I0126 16:57:24.590882 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-phb7b" Jan 26 16:57:24 crc kubenswrapper[4865]: I0126 16:57:24.785585 4865 patch_prober.go:28] interesting pod/router-default-5444994796-gx9t9 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 26 16:57:24 crc kubenswrapper[4865]: [-]has-synced failed: reason withheld Jan 26 16:57:24 crc kubenswrapper[4865]: [+]process-running ok Jan 26 16:57:24 crc kubenswrapper[4865]: healthz check failed Jan 26 16:57:24 crc kubenswrapper[4865]: I0126 
16:57:24.786072 4865 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-gx9t9" podUID="a5eb1e8d-ffa8-4422-aa5d-852074436139" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 26 16:57:25 crc kubenswrapper[4865]: I0126 16:57:25.256233 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29490765-lx6b6" Jan 26 16:57:25 crc kubenswrapper[4865]: I0126 16:57:25.331021 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3862707b-bcae-4ad0-b309-fa075dc82f70-config-volume" (OuterVolumeSpecName: "config-volume") pod "3862707b-bcae-4ad0-b309-fa075dc82f70" (UID: "3862707b-bcae-4ad0-b309-fa075dc82f70"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:57:25 crc kubenswrapper[4865]: I0126 16:57:25.329830 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3862707b-bcae-4ad0-b309-fa075dc82f70-config-volume\") pod \"3862707b-bcae-4ad0-b309-fa075dc82f70\" (UID: \"3862707b-bcae-4ad0-b309-fa075dc82f70\") " Jan 26 16:57:25 crc kubenswrapper[4865]: I0126 16:57:25.331142 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ltnmr\" (UniqueName: \"kubernetes.io/projected/3862707b-bcae-4ad0-b309-fa075dc82f70-kube-api-access-ltnmr\") pod \"3862707b-bcae-4ad0-b309-fa075dc82f70\" (UID: \"3862707b-bcae-4ad0-b309-fa075dc82f70\") " Jan 26 16:57:25 crc kubenswrapper[4865]: I0126 16:57:25.331266 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3862707b-bcae-4ad0-b309-fa075dc82f70-secret-volume\") pod \"3862707b-bcae-4ad0-b309-fa075dc82f70\" (UID: \"3862707b-bcae-4ad0-b309-fa075dc82f70\") " Jan 26 16:57:25 crc kubenswrapper[4865]: I0126 16:57:25.331699 4865 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3862707b-bcae-4ad0-b309-fa075dc82f70-config-volume\") on node \"crc\" DevicePath \"\"" Jan 26 16:57:25 crc kubenswrapper[4865]: I0126 16:57:25.358445 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3862707b-bcae-4ad0-b309-fa075dc82f70-kube-api-access-ltnmr" (OuterVolumeSpecName: "kube-api-access-ltnmr") pod "3862707b-bcae-4ad0-b309-fa075dc82f70" (UID: "3862707b-bcae-4ad0-b309-fa075dc82f70"). InnerVolumeSpecName "kube-api-access-ltnmr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:57:25 crc kubenswrapper[4865]: I0126 16:57:25.359004 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3862707b-bcae-4ad0-b309-fa075dc82f70-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "3862707b-bcae-4ad0-b309-fa075dc82f70" (UID: "3862707b-bcae-4ad0-b309-fa075dc82f70"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:57:25 crc kubenswrapper[4865]: I0126 16:57:25.440642 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ltnmr\" (UniqueName: \"kubernetes.io/projected/3862707b-bcae-4ad0-b309-fa075dc82f70-kube-api-access-ltnmr\") on node \"crc\" DevicePath \"\"" Jan 26 16:57:25 crc kubenswrapper[4865]: I0126 16:57:25.440688 4865 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3862707b-bcae-4ad0-b309-fa075dc82f70-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 26 16:57:25 crc kubenswrapper[4865]: I0126 16:57:25.678304 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-gx9t9" Jan 26 16:57:25 crc kubenswrapper[4865]: I0126 16:57:25.684548 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-gx9t9" Jan 26 16:57:26 crc kubenswrapper[4865]: I0126 16:57:26.077802 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29490765-lx6b6" event={"ID":"3862707b-bcae-4ad0-b309-fa075dc82f70","Type":"ContainerDied","Data":"a4e37ab8c7c06f32668fa357edcf672abfbea0b3c1190ac9c04afb7559243d0e"} Jan 26 16:57:26 crc kubenswrapper[4865]: I0126 16:57:26.077823 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29490765-lx6b6" Jan 26 16:57:26 crc kubenswrapper[4865]: I0126 16:57:26.077858 4865 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a4e37ab8c7c06f32668fa357edcf672abfbea0b3c1190ac9c04afb7559243d0e" Jan 26 16:57:27 crc kubenswrapper[4865]: I0126 16:57:26.966262 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/27ebe888-610a-47c4-b256-3ddbf03f83b9-metrics-certs\") pod \"network-metrics-daemon-wx7wp\" (UID: \"27ebe888-610a-47c4-b256-3ddbf03f83b9\") " pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:57:27 crc kubenswrapper[4865]: I0126 16:57:26.984867 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/27ebe888-610a-47c4-b256-3ddbf03f83b9-metrics-certs\") pod \"network-metrics-daemon-wx7wp\" (UID: \"27ebe888-610a-47c4-b256-3ddbf03f83b9\") " pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:57:27 crc kubenswrapper[4865]: I0126 16:57:27.084417 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-wx7wp" Jan 26 16:57:28 crc kubenswrapper[4865]: I0126 16:57:28.097860 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-wx7wp"] Jan 26 16:57:28 crc kubenswrapper[4865]: I0126 16:57:28.177946 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-wx7wp" event={"ID":"27ebe888-610a-47c4-b256-3ddbf03f83b9","Type":"ContainerStarted","Data":"1f407e625b3be4efe3891b664addb5930e449ae5abd5b484755235ed17ab3955"} Jan 26 16:57:31 crc kubenswrapper[4865]: I0126 16:57:31.256864 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-wx7wp" event={"ID":"27ebe888-610a-47c4-b256-3ddbf03f83b9","Type":"ContainerStarted","Data":"a729ebdbc1217b83e1c58759aa6af437e968200732425f052d449fb3cbf00cc2"} Jan 26 16:57:32 crc kubenswrapper[4865]: I0126 16:57:32.349216 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-wx7wp" event={"ID":"27ebe888-610a-47c4-b256-3ddbf03f83b9","Type":"ContainerStarted","Data":"022b8424c07e706adc67d3cda23e5192a8ed4a79b612768c59c9b3ab1441051f"} Jan 26 16:57:34 crc kubenswrapper[4865]: I0126 16:57:34.262604 4865 patch_prober.go:28] interesting pod/downloads-7954f5f757-7pntj container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Jan 26 16:57:34 crc kubenswrapper[4865]: I0126 16:57:34.263016 4865 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7pntj" podUID="9156ce7a-0206-4509-b090-d2e93c83f425" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Jan 26 16:57:34 crc kubenswrapper[4865]: I0126 16:57:34.262850 4865 patch_prober.go:28] interesting pod/downloads-7954f5f757-7pntj container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Jan 26 16:57:34 crc kubenswrapper[4865]: I0126 16:57:34.263147 4865 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-7pntj" podUID="9156ce7a-0206-4509-b090-d2e93c83f425" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Jan 26 16:57:34 crc kubenswrapper[4865]: I0126 16:57:34.263201 4865 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-console/downloads-7954f5f757-7pntj" Jan 26 16:57:34 crc kubenswrapper[4865]: I0126 16:57:34.263919 4865 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="download-server" containerStatusID={"Type":"cri-o","ID":"37aac76f00b505725306c6ccf8f49386ab3561937095c7d54c9e23e7fe81d8ab"} pod="openshift-console/downloads-7954f5f757-7pntj" containerMessage="Container download-server failed liveness probe, will be restarted" Jan 26 16:57:34 crc kubenswrapper[4865]: I0126 16:57:34.264055 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/downloads-7954f5f757-7pntj" podUID="9156ce7a-0206-4509-b090-d2e93c83f425" containerName="download-server" 
containerID="cri-o://37aac76f00b505725306c6ccf8f49386ab3561937095c7d54c9e23e7fe81d8ab" gracePeriod=2 Jan 26 16:57:34 crc kubenswrapper[4865]: I0126 16:57:34.265500 4865 patch_prober.go:28] interesting pod/downloads-7954f5f757-7pntj container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Jan 26 16:57:34 crc kubenswrapper[4865]: I0126 16:57:34.265563 4865 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7pntj" podUID="9156ce7a-0206-4509-b090-d2e93c83f425" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Jan 26 16:57:34 crc kubenswrapper[4865]: I0126 16:57:34.378541 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-dsngx" Jan 26 16:57:34 crc kubenswrapper[4865]: I0126 16:57:34.384564 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-dsngx" Jan 26 16:57:34 crc kubenswrapper[4865]: I0126 16:57:34.400275 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-wx7wp" podStartSLOduration=151.400245188 podStartE2EDuration="2m31.400245188s" podCreationTimestamp="2026-01-26 16:55:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:57:32.37140592 +0000 UTC m=+179.955291517" watchObservedRunningTime="2026-01-26 16:57:34.400245188 +0000 UTC m=+181.984130765" Jan 26 16:57:34 crc kubenswrapper[4865]: I0126 16:57:34.516297 4865 patch_prober.go:28] interesting pod/machine-config-daemon-q8cb9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 16:57:34 crc kubenswrapper[4865]: I0126 16:57:34.516423 4865 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 16:57:35 crc kubenswrapper[4865]: I0126 16:57:35.379349 4865 generic.go:334] "Generic (PLEG): container finished" podID="9156ce7a-0206-4509-b090-d2e93c83f425" containerID="37aac76f00b505725306c6ccf8f49386ab3561937095c7d54c9e23e7fe81d8ab" exitCode=0 Jan 26 16:57:35 crc kubenswrapper[4865]: I0126 16:57:35.379438 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-7pntj" event={"ID":"9156ce7a-0206-4509-b090-d2e93c83f425","Type":"ContainerDied","Data":"37aac76f00b505725306c6ccf8f49386ab3561937095c7d54c9e23e7fe81d8ab"} Jan 26 16:57:38 crc kubenswrapper[4865]: I0126 16:57:38.827115 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 16:57:44 crc kubenswrapper[4865]: I0126 16:57:44.154429 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-j2f55" Jan 26 16:57:44 crc kubenswrapper[4865]: I0126 16:57:44.265840 
4865 patch_prober.go:28] interesting pod/downloads-7954f5f757-7pntj container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Jan 26 16:57:44 crc kubenswrapper[4865]: I0126 16:57:44.266677 4865 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7pntj" podUID="9156ce7a-0206-4509-b090-d2e93c83f425" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Jan 26 16:57:50 crc kubenswrapper[4865]: I0126 16:57:50.969857 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 16:57:51 crc kubenswrapper[4865]: I0126 16:57:51.935542 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 26 16:57:51 crc kubenswrapper[4865]: E0126 16:57:51.936032 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4184b2be-7b37-40a8-b2d0-9b91d5daf5a8" containerName="pruner" Jan 26 16:57:51 crc kubenswrapper[4865]: I0126 16:57:51.936052 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="4184b2be-7b37-40a8-b2d0-9b91d5daf5a8" containerName="pruner" Jan 26 16:57:51 crc kubenswrapper[4865]: E0126 16:57:51.936075 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c335727-ad5b-4e67-ab8d-a97e0d20d310" containerName="pruner" Jan 26 16:57:51 crc kubenswrapper[4865]: I0126 16:57:51.936085 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c335727-ad5b-4e67-ab8d-a97e0d20d310" containerName="pruner" Jan 26 16:57:51 crc kubenswrapper[4865]: E0126 16:57:51.936107 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3862707b-bcae-4ad0-b309-fa075dc82f70" containerName="collect-profiles" Jan 26 16:57:51 crc kubenswrapper[4865]: I0126 16:57:51.936118 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="3862707b-bcae-4ad0-b309-fa075dc82f70" containerName="collect-profiles" Jan 26 16:57:51 crc kubenswrapper[4865]: I0126 16:57:51.936246 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c335727-ad5b-4e67-ab8d-a97e0d20d310" containerName="pruner" Jan 26 16:57:51 crc kubenswrapper[4865]: I0126 16:57:51.936261 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="4184b2be-7b37-40a8-b2d0-9b91d5daf5a8" containerName="pruner" Jan 26 16:57:51 crc kubenswrapper[4865]: I0126 16:57:51.936271 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="3862707b-bcae-4ad0-b309-fa075dc82f70" containerName="collect-profiles" Jan 26 16:57:51 crc kubenswrapper[4865]: I0126 16:57:51.936918 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 26 16:57:51 crc kubenswrapper[4865]: I0126 16:57:51.940272 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 26 16:57:51 crc kubenswrapper[4865]: I0126 16:57:51.941785 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 26 16:57:51 crc kubenswrapper[4865]: I0126 16:57:51.946169 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 26 16:57:52 crc kubenswrapper[4865]: I0126 16:57:52.015873 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/10a46546-e560-4ed8-8032-643d0a99fc09-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"10a46546-e560-4ed8-8032-643d0a99fc09\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 26 16:57:52 crc kubenswrapper[4865]: I0126 16:57:52.016086 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/10a46546-e560-4ed8-8032-643d0a99fc09-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"10a46546-e560-4ed8-8032-643d0a99fc09\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 26 16:57:52 crc kubenswrapper[4865]: I0126 16:57:52.117849 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/10a46546-e560-4ed8-8032-643d0a99fc09-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"10a46546-e560-4ed8-8032-643d0a99fc09\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 26 16:57:52 crc kubenswrapper[4865]: I0126 16:57:52.117930 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/10a46546-e560-4ed8-8032-643d0a99fc09-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"10a46546-e560-4ed8-8032-643d0a99fc09\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 26 16:57:52 crc kubenswrapper[4865]: I0126 16:57:52.118017 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/10a46546-e560-4ed8-8032-643d0a99fc09-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"10a46546-e560-4ed8-8032-643d0a99fc09\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 26 16:57:52 crc kubenswrapper[4865]: I0126 16:57:52.144634 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/10a46546-e560-4ed8-8032-643d0a99fc09-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"10a46546-e560-4ed8-8032-643d0a99fc09\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 26 16:57:52 crc kubenswrapper[4865]: I0126 16:57:52.272411 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 26 16:57:54 crc kubenswrapper[4865]: I0126 16:57:54.263071 4865 patch_prober.go:28] interesting pod/downloads-7954f5f757-7pntj container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Jan 26 16:57:54 crc kubenswrapper[4865]: I0126 16:57:54.263142 4865 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7pntj" podUID="9156ce7a-0206-4509-b090-d2e93c83f425" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Jan 26 16:57:56 crc kubenswrapper[4865]: I0126 16:57:56.539637 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 26 16:57:56 crc kubenswrapper[4865]: I0126 16:57:56.540862 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 26 16:57:56 crc kubenswrapper[4865]: I0126 16:57:56.552652 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 26 16:57:56 crc kubenswrapper[4865]: I0126 16:57:56.632954 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2c6a6074-31d0-4bf7-a237-e9b4e2e17de7-kube-api-access\") pod \"installer-9-crc\" (UID: \"2c6a6074-31d0-4bf7-a237-e9b4e2e17de7\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 26 16:57:56 crc kubenswrapper[4865]: I0126 16:57:56.633112 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2c6a6074-31d0-4bf7-a237-e9b4e2e17de7-kubelet-dir\") pod \"installer-9-crc\" (UID: \"2c6a6074-31d0-4bf7-a237-e9b4e2e17de7\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 26 16:57:56 crc kubenswrapper[4865]: I0126 16:57:56.633163 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/2c6a6074-31d0-4bf7-a237-e9b4e2e17de7-var-lock\") pod \"installer-9-crc\" (UID: \"2c6a6074-31d0-4bf7-a237-e9b4e2e17de7\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 26 16:57:56 crc kubenswrapper[4865]: I0126 16:57:56.734167 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2c6a6074-31d0-4bf7-a237-e9b4e2e17de7-kube-api-access\") pod \"installer-9-crc\" (UID: \"2c6a6074-31d0-4bf7-a237-e9b4e2e17de7\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 26 16:57:56 crc kubenswrapper[4865]: I0126 16:57:56.734809 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2c6a6074-31d0-4bf7-a237-e9b4e2e17de7-kubelet-dir\") pod \"installer-9-crc\" (UID: \"2c6a6074-31d0-4bf7-a237-e9b4e2e17de7\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 26 16:57:56 crc kubenswrapper[4865]: I0126 16:57:56.734985 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/2c6a6074-31d0-4bf7-a237-e9b4e2e17de7-var-lock\") pod \"installer-9-crc\" (UID: \"2c6a6074-31d0-4bf7-a237-e9b4e2e17de7\") " 
pod="openshift-kube-apiserver/installer-9-crc" Jan 26 16:57:56 crc kubenswrapper[4865]: I0126 16:57:56.735135 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2c6a6074-31d0-4bf7-a237-e9b4e2e17de7-kubelet-dir\") pod \"installer-9-crc\" (UID: \"2c6a6074-31d0-4bf7-a237-e9b4e2e17de7\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 26 16:57:56 crc kubenswrapper[4865]: I0126 16:57:56.735154 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/2c6a6074-31d0-4bf7-a237-e9b4e2e17de7-var-lock\") pod \"installer-9-crc\" (UID: \"2c6a6074-31d0-4bf7-a237-e9b4e2e17de7\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 26 16:57:56 crc kubenswrapper[4865]: I0126 16:57:56.754787 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2c6a6074-31d0-4bf7-a237-e9b4e2e17de7-kube-api-access\") pod \"installer-9-crc\" (UID: \"2c6a6074-31d0-4bf7-a237-e9b4e2e17de7\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 26 16:57:56 crc kubenswrapper[4865]: I0126 16:57:56.873927 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 26 16:58:03 crc kubenswrapper[4865]: E0126 16:58:03.848851 4865 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 26 16:58:03 crc kubenswrapper[4865]: E0126 16:58:03.849445 4865 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-56lwr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-gw6ld_openshift-marketplace(1fa30c32-b5b9-48db-baf7-761da95213f7): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 26 
16:58:03 crc kubenswrapper[4865]: E0126 16:58:03.850722 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-gw6ld" podUID="1fa30c32-b5b9-48db-baf7-761da95213f7" Jan 26 16:58:04 crc kubenswrapper[4865]: I0126 16:58:04.264178 4865 patch_prober.go:28] interesting pod/downloads-7954f5f757-7pntj container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Jan 26 16:58:04 crc kubenswrapper[4865]: I0126 16:58:04.264257 4865 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7pntj" podUID="9156ce7a-0206-4509-b090-d2e93c83f425" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Jan 26 16:58:04 crc kubenswrapper[4865]: I0126 16:58:04.512197 4865 patch_prober.go:28] interesting pod/machine-config-daemon-q8cb9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 16:58:04 crc kubenswrapper[4865]: I0126 16:58:04.512257 4865 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 16:58:04 crc kubenswrapper[4865]: I0126 16:58:04.512307 4865 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" Jan 26 16:58:04 crc kubenswrapper[4865]: I0126 16:58:04.512956 4865 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"79930d9ec5ae2071a02e2417508458bd2f1b5693205ae0ad3ee46c5a3a31493b"} pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 26 16:58:04 crc kubenswrapper[4865]: I0126 16:58:04.513031 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" containerName="machine-config-daemon" containerID="cri-o://79930d9ec5ae2071a02e2417508458bd2f1b5693205ae0ad3ee46c5a3a31493b" gracePeriod=600 Jan 26 16:58:04 crc kubenswrapper[4865]: I0126 16:58:04.658187 4865 generic.go:334] "Generic (PLEG): container finished" podID="0ddedab5-2528-4881-9251-9ba5334aea61" containerID="79930d9ec5ae2071a02e2417508458bd2f1b5693205ae0ad3ee46c5a3a31493b" exitCode=0 Jan 26 16:58:04 crc kubenswrapper[4865]: I0126 16:58:04.658191 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" event={"ID":"0ddedab5-2528-4881-9251-9ba5334aea61","Type":"ContainerDied","Data":"79930d9ec5ae2071a02e2417508458bd2f1b5693205ae0ad3ee46c5a3a31493b"} Jan 26 16:58:05 crc kubenswrapper[4865]: E0126 16:58:05.048934 4865 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-gw6ld" podUID="1fa30c32-b5b9-48db-baf7-761da95213f7" Jan 26 16:58:05 crc kubenswrapper[4865]: E0126 16:58:05.114779 4865 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 26 16:58:05 crc kubenswrapper[4865]: E0126 16:58:05.114935 4865 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-k587s,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-r9jxm_openshift-marketplace(7337688e-9bf5-436c-be77-afad7fa093ed): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 26 16:58:05 crc kubenswrapper[4865]: E0126 16:58:05.116169 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-r9jxm" podUID="7337688e-9bf5-436c-be77-afad7fa093ed" Jan 26 16:58:07 crc kubenswrapper[4865]: E0126 16:58:07.451071 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-r9jxm" podUID="7337688e-9bf5-436c-be77-afad7fa093ed" Jan 26 16:58:07 crc kubenswrapper[4865]: E0126 16:58:07.530280 4865 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: 
context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 26 16:58:07 crc kubenswrapper[4865]: E0126 16:58:07.530492 4865 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-z6lkv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-7x4jl_openshift-marketplace(28d973df-adc6-41b0-81b9-afd3a743641f): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 26 16:58:07 crc kubenswrapper[4865]: E0126 16:58:07.531183 4865 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 26 16:58:07 crc kubenswrapper[4865]: E0126 16:58:07.531360 4865 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pk767,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-6vslt_openshift-marketplace(203f4245-9105-442f-a78b-dc354926516b): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 26 16:58:07 crc kubenswrapper[4865]: E0126 16:58:07.532524 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-6vslt" podUID="203f4245-9105-442f-a78b-dc354926516b" Jan 26 16:58:07 crc kubenswrapper[4865]: E0126 16:58:07.533348 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-7x4jl" podUID="28d973df-adc6-41b0-81b9-afd3a743641f" Jan 26 16:58:07 crc kubenswrapper[4865]: E0126 16:58:07.753705 4865 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 26 16:58:07 crc kubenswrapper[4865]: E0126 16:58:07.754304 4865 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7nhjm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-fkmhm_openshift-marketplace(ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 26 16:58:07 crc kubenswrapper[4865]: E0126 16:58:07.755479 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-fkmhm" podUID="ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf" Jan 26 16:58:11 crc kubenswrapper[4865]: E0126 16:58:11.522144 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-6vslt" podUID="203f4245-9105-442f-a78b-dc354926516b" Jan 26 16:58:11 crc kubenswrapper[4865]: E0126 16:58:11.523641 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-7x4jl" podUID="28d973df-adc6-41b0-81b9-afd3a743641f" Jan 26 16:58:11 crc kubenswrapper[4865]: E0126 16:58:11.523683 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-fkmhm" podUID="ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf" Jan 26 16:58:11 crc kubenswrapper[4865]: E0126 16:58:11.728331 4865 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 26 16:58:11 crc kubenswrapper[4865]: E0126 16:58:11.729348 4865 kuberuntime_manager.go:1274] "Unhandled 
Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-t5mqf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-hxg9k_openshift-marketplace(ea398908-abb3-4ce8-bbaf-a44b6350314d): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 26 16:58:11 crc kubenswrapper[4865]: E0126 16:58:11.731256 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-hxg9k" podUID="ea398908-abb3-4ce8-bbaf-a44b6350314d" Jan 26 16:58:11 crc kubenswrapper[4865]: E0126 16:58:11.780154 4865 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 26 16:58:11 crc kubenswrapper[4865]: E0126 16:58:11.780309 4865 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rchs4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-pg69s_openshift-marketplace(e402709d-4ea5-403d-af16-3ab178841b35): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 26 16:58:11 crc kubenswrapper[4865]: E0126 16:58:11.781478 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-pg69s" podUID="e402709d-4ea5-403d-af16-3ab178841b35" Jan 26 16:58:11 crc kubenswrapper[4865]: E0126 16:58:11.788062 4865 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 26 16:58:11 crc kubenswrapper[4865]: E0126 16:58:11.788246 4865 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-r77gt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-d5vtl_openshift-marketplace(109ec94e-2b65-4cc6-a53c-3f874401fd5e): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 26 16:58:11 crc kubenswrapper[4865]: E0126 16:58:11.789414 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-d5vtl" podUID="109ec94e-2b65-4cc6-a53c-3f874401fd5e" Jan 26 16:58:11 crc kubenswrapper[4865]: I0126 16:58:11.841638 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 26 16:58:11 crc kubenswrapper[4865]: I0126 16:58:11.897941 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 26 16:58:12 crc kubenswrapper[4865]: I0126 16:58:12.743951 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"2c6a6074-31d0-4bf7-a237-e9b4e2e17de7","Type":"ContainerStarted","Data":"ad966424308f8ef14dfdde2a400e3745ed182824a88985f95cf529e8be33fb24"} Jan 26 16:58:12 crc kubenswrapper[4865]: I0126 16:58:12.744479 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"2c6a6074-31d0-4bf7-a237-e9b4e2e17de7","Type":"ContainerStarted","Data":"b9a139018896935aae0e2aa25d45d12a4a3f48cdd47d09192db80bce98cbddb1"} Jan 26 16:58:12 crc kubenswrapper[4865]: I0126 16:58:12.748328 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" event={"ID":"0ddedab5-2528-4881-9251-9ba5334aea61","Type":"ContainerStarted","Data":"bf7daacd9f18ffbc0209bc0be892797ce420eff313886eadad483fffc218b621"} Jan 26 16:58:12 crc kubenswrapper[4865]: I0126 16:58:12.757119 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" 
event={"ID":"10a46546-e560-4ed8-8032-643d0a99fc09","Type":"ContainerStarted","Data":"688e569afa778b8ca62331b41cc99912be05ff5f2fd8cb23030037c78e459e8e"} Jan 26 16:58:12 crc kubenswrapper[4865]: I0126 16:58:12.757175 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"10a46546-e560-4ed8-8032-643d0a99fc09","Type":"ContainerStarted","Data":"2dd7715d0a5c06625d6f4986481d18e8a930d4bd2a776612298b6c8bec7b9ce3"} Jan 26 16:58:12 crc kubenswrapper[4865]: I0126 16:58:12.760305 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-7pntj" event={"ID":"9156ce7a-0206-4509-b090-d2e93c83f425","Type":"ContainerStarted","Data":"d763ccfdfbd6a715aeb8a4dca5f769c308a195734161a33afdee607cbfe13d86"} Jan 26 16:58:12 crc kubenswrapper[4865]: I0126 16:58:12.760772 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-7pntj" Jan 26 16:58:12 crc kubenswrapper[4865]: I0126 16:58:12.761655 4865 patch_prober.go:28] interesting pod/downloads-7954f5f757-7pntj container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Jan 26 16:58:12 crc kubenswrapper[4865]: I0126 16:58:12.761815 4865 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7pntj" podUID="9156ce7a-0206-4509-b090-d2e93c83f425" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Jan 26 16:58:12 crc kubenswrapper[4865]: E0126 16:58:12.762817 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-hxg9k" podUID="ea398908-abb3-4ce8-bbaf-a44b6350314d" Jan 26 16:58:12 crc kubenswrapper[4865]: E0126 16:58:12.762915 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-d5vtl" podUID="109ec94e-2b65-4cc6-a53c-3f874401fd5e" Jan 26 16:58:12 crc kubenswrapper[4865]: E0126 16:58:12.762976 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-pg69s" podUID="e402709d-4ea5-403d-af16-3ab178841b35" Jan 26 16:58:12 crc kubenswrapper[4865]: I0126 16:58:12.795957 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=16.795939269 podStartE2EDuration="16.795939269s" podCreationTimestamp="2026-01-26 16:57:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:58:12.769617562 +0000 UTC m=+220.353503149" watchObservedRunningTime="2026-01-26 16:58:12.795939269 +0000 UTC m=+220.379824856" Jan 26 16:58:12 crc kubenswrapper[4865]: I0126 16:58:12.833807 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=21.833781581 podStartE2EDuration="21.833781581s" podCreationTimestamp="2026-01-26 16:57:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:58:12.830216758 +0000 UTC m=+220.414102345" watchObservedRunningTime="2026-01-26 16:58:12.833781581 +0000 UTC m=+220.417667168" Jan 26 16:58:13 crc kubenswrapper[4865]: I0126 16:58:13.767694 4865 generic.go:334] "Generic (PLEG): container finished" podID="10a46546-e560-4ed8-8032-643d0a99fc09" containerID="688e569afa778b8ca62331b41cc99912be05ff5f2fd8cb23030037c78e459e8e" exitCode=0 Jan 26 16:58:13 crc kubenswrapper[4865]: I0126 16:58:13.767853 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"10a46546-e560-4ed8-8032-643d0a99fc09","Type":"ContainerDied","Data":"688e569afa778b8ca62331b41cc99912be05ff5f2fd8cb23030037c78e459e8e"} Jan 26 16:58:13 crc kubenswrapper[4865]: I0126 16:58:13.770905 4865 patch_prober.go:28] interesting pod/downloads-7954f5f757-7pntj container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Jan 26 16:58:13 crc kubenswrapper[4865]: I0126 16:58:13.771018 4865 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7pntj" podUID="9156ce7a-0206-4509-b090-d2e93c83f425" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Jan 26 16:58:14 crc kubenswrapper[4865]: I0126 16:58:14.263127 4865 patch_prober.go:28] interesting pod/downloads-7954f5f757-7pntj container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Jan 26 16:58:14 crc kubenswrapper[4865]: I0126 16:58:14.263209 4865 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7pntj" podUID="9156ce7a-0206-4509-b090-d2e93c83f425" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Jan 26 16:58:14 crc kubenswrapper[4865]: I0126 16:58:14.263211 4865 patch_prober.go:28] interesting pod/downloads-7954f5f757-7pntj container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Jan 26 16:58:14 crc kubenswrapper[4865]: I0126 16:58:14.263307 4865 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-7pntj" podUID="9156ce7a-0206-4509-b090-d2e93c83f425" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Jan 26 16:58:15 crc kubenswrapper[4865]: I0126 16:58:15.009035 4865 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 26 16:58:15 crc kubenswrapper[4865]: I0126 16:58:15.198152 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/10a46546-e560-4ed8-8032-643d0a99fc09-kubelet-dir\") pod \"10a46546-e560-4ed8-8032-643d0a99fc09\" (UID: \"10a46546-e560-4ed8-8032-643d0a99fc09\") " Jan 26 16:58:15 crc kubenswrapper[4865]: I0126 16:58:15.198306 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/10a46546-e560-4ed8-8032-643d0a99fc09-kube-api-access\") pod \"10a46546-e560-4ed8-8032-643d0a99fc09\" (UID: \"10a46546-e560-4ed8-8032-643d0a99fc09\") " Jan 26 16:58:15 crc kubenswrapper[4865]: I0126 16:58:15.198299 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/10a46546-e560-4ed8-8032-643d0a99fc09-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "10a46546-e560-4ed8-8032-643d0a99fc09" (UID: "10a46546-e560-4ed8-8032-643d0a99fc09"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 16:58:15 crc kubenswrapper[4865]: I0126 16:58:15.198681 4865 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/10a46546-e560-4ed8-8032-643d0a99fc09-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 26 16:58:15 crc kubenswrapper[4865]: I0126 16:58:15.206032 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10a46546-e560-4ed8-8032-643d0a99fc09-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "10a46546-e560-4ed8-8032-643d0a99fc09" (UID: "10a46546-e560-4ed8-8032-643d0a99fc09"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:58:15 crc kubenswrapper[4865]: I0126 16:58:15.299525 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/10a46546-e560-4ed8-8032-643d0a99fc09-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 26 16:58:15 crc kubenswrapper[4865]: I0126 16:58:15.783151 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"10a46546-e560-4ed8-8032-643d0a99fc09","Type":"ContainerDied","Data":"2dd7715d0a5c06625d6f4986481d18e8a930d4bd2a776612298b6c8bec7b9ce3"} Jan 26 16:58:15 crc kubenswrapper[4865]: I0126 16:58:15.783463 4865 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2dd7715d0a5c06625d6f4986481d18e8a930d4bd2a776612298b6c8bec7b9ce3" Jan 26 16:58:15 crc kubenswrapper[4865]: I0126 16:58:15.783244 4865 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 26 16:58:24 crc kubenswrapper[4865]: I0126 16:58:24.204197 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gw6ld" event={"ID":"1fa30c32-b5b9-48db-baf7-761da95213f7","Type":"ContainerStarted","Data":"93e64981d8b9951ed6406840fbef03e757e2bb2a488f1543615661b32e606972"} Jan 26 16:58:24 crc kubenswrapper[4865]: I0126 16:58:24.262910 4865 patch_prober.go:28] interesting pod/downloads-7954f5f757-7pntj container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Jan 26 16:58:24 crc kubenswrapper[4865]: I0126 16:58:24.263010 4865 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-7pntj" podUID="9156ce7a-0206-4509-b090-d2e93c83f425" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Jan 26 16:58:24 crc kubenswrapper[4865]: I0126 16:58:24.263545 4865 patch_prober.go:28] interesting pod/downloads-7954f5f757-7pntj container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Jan 26 16:58:24 crc kubenswrapper[4865]: I0126 16:58:24.263628 4865 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7pntj" podUID="9156ce7a-0206-4509-b090-d2e93c83f425" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Jan 26 16:58:26 crc kubenswrapper[4865]: I0126 16:58:26.216700 4865 generic.go:334] "Generic (PLEG): container finished" podID="1fa30c32-b5b9-48db-baf7-761da95213f7" containerID="93e64981d8b9951ed6406840fbef03e757e2bb2a488f1543615661b32e606972" exitCode=0 Jan 26 16:58:26 crc kubenswrapper[4865]: I0126 16:58:26.216778 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gw6ld" event={"ID":"1fa30c32-b5b9-48db-baf7-761da95213f7","Type":"ContainerDied","Data":"93e64981d8b9951ed6406840fbef03e757e2bb2a488f1543615661b32e606972"} Jan 26 16:58:31 crc kubenswrapper[4865]: I0126 16:58:31.245090 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7x4jl" event={"ID":"28d973df-adc6-41b0-81b9-afd3a743641f","Type":"ContainerStarted","Data":"141e4cb88b61ce456d7319fae495575d0906950c62e6c46fb927848a6dee34cd"} Jan 26 16:58:31 crc kubenswrapper[4865]: I0126 16:58:31.253318 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d5vtl" event={"ID":"109ec94e-2b65-4cc6-a53c-3f874401fd5e","Type":"ContainerStarted","Data":"923a5ed4618c80f10c1343d9559301b15c16773d29a39c2e3cc3360fb420c9c9"} Jan 26 16:58:31 crc kubenswrapper[4865]: I0126 16:58:31.255518 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pg69s" event={"ID":"e402709d-4ea5-403d-af16-3ab178841b35","Type":"ContainerStarted","Data":"1fdc1af6f42220ef6f9fe4dcab666d0787bb7069b7272e868ab095b1d0c393df"} Jan 26 16:58:31 crc kubenswrapper[4865]: I0126 16:58:31.257729 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hxg9k" 
event={"ID":"ea398908-abb3-4ce8-bbaf-a44b6350314d","Type":"ContainerStarted","Data":"4591f46793bd88eca8ded4e7a3162c6f5c33af4606451fa3f64fb6ee007727af"} Jan 26 16:58:31 crc kubenswrapper[4865]: I0126 16:58:31.273125 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6vslt" event={"ID":"203f4245-9105-442f-a78b-dc354926516b","Type":"ContainerStarted","Data":"90e15f16a7be5dfce2dbe99bca17ee42b13a35cde2d3441e422b202866806f12"} Jan 26 16:58:31 crc kubenswrapper[4865]: I0126 16:58:31.278566 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r9jxm" event={"ID":"7337688e-9bf5-436c-be77-afad7fa093ed","Type":"ContainerStarted","Data":"dc1415cba01735bf8f78879807619fb140a2fa74976ea17e880df730de5916c5"} Jan 26 16:58:31 crc kubenswrapper[4865]: I0126 16:58:31.283625 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gw6ld" event={"ID":"1fa30c32-b5b9-48db-baf7-761da95213f7","Type":"ContainerStarted","Data":"e3e0e69295103de391636c38ede4250e4e109f56322501446ce035b5c809b31a"} Jan 26 16:58:31 crc kubenswrapper[4865]: I0126 16:58:31.505948 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-gw6ld" podStartSLOduration=5.696887226 podStartE2EDuration="1m16.505925799s" podCreationTimestamp="2026-01-26 16:57:15 +0000 UTC" firstStartedPulling="2026-01-26 16:57:19.718253918 +0000 UTC m=+167.302139505" lastFinishedPulling="2026-01-26 16:58:30.527292501 +0000 UTC m=+238.111178078" observedRunningTime="2026-01-26 16:58:31.489100079 +0000 UTC m=+239.072985686" watchObservedRunningTime="2026-01-26 16:58:31.505925799 +0000 UTC m=+239.089811386" Jan 26 16:58:32 crc kubenswrapper[4865]: I0126 16:58:32.289642 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fkmhm" event={"ID":"ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf","Type":"ContainerStarted","Data":"d17f97b8b34a236431f2df8eaf3aba2e41f97dc9b1512fa84044f3fd8da2d3b3"} Jan 26 16:58:32 crc kubenswrapper[4865]: I0126 16:58:32.292277 4865 generic.go:334] "Generic (PLEG): container finished" podID="109ec94e-2b65-4cc6-a53c-3f874401fd5e" containerID="923a5ed4618c80f10c1343d9559301b15c16773d29a39c2e3cc3360fb420c9c9" exitCode=0 Jan 26 16:58:32 crc kubenswrapper[4865]: I0126 16:58:32.292430 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d5vtl" event={"ID":"109ec94e-2b65-4cc6-a53c-3f874401fd5e","Type":"ContainerDied","Data":"923a5ed4618c80f10c1343d9559301b15c16773d29a39c2e3cc3360fb420c9c9"} Jan 26 16:58:32 crc kubenswrapper[4865]: I0126 16:58:32.299508 4865 generic.go:334] "Generic (PLEG): container finished" podID="7337688e-9bf5-436c-be77-afad7fa093ed" containerID="dc1415cba01735bf8f78879807619fb140a2fa74976ea17e880df730de5916c5" exitCode=0 Jan 26 16:58:32 crc kubenswrapper[4865]: I0126 16:58:32.299647 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r9jxm" event={"ID":"7337688e-9bf5-436c-be77-afad7fa093ed","Type":"ContainerDied","Data":"dc1415cba01735bf8f78879807619fb140a2fa74976ea17e880df730de5916c5"} Jan 26 16:58:33 crc kubenswrapper[4865]: I0126 16:58:33.309093 4865 generic.go:334] "Generic (PLEG): container finished" podID="203f4245-9105-442f-a78b-dc354926516b" containerID="90e15f16a7be5dfce2dbe99bca17ee42b13a35cde2d3441e422b202866806f12" exitCode=0 Jan 26 16:58:33 crc kubenswrapper[4865]: I0126 16:58:33.309215 
4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6vslt" event={"ID":"203f4245-9105-442f-a78b-dc354926516b","Type":"ContainerDied","Data":"90e15f16a7be5dfce2dbe99bca17ee42b13a35cde2d3441e422b202866806f12"} Jan 26 16:58:33 crc kubenswrapper[4865]: I0126 16:58:33.311985 4865 generic.go:334] "Generic (PLEG): container finished" podID="28d973df-adc6-41b0-81b9-afd3a743641f" containerID="141e4cb88b61ce456d7319fae495575d0906950c62e6c46fb927848a6dee34cd" exitCode=0 Jan 26 16:58:33 crc kubenswrapper[4865]: I0126 16:58:33.312281 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7x4jl" event={"ID":"28d973df-adc6-41b0-81b9-afd3a743641f","Type":"ContainerDied","Data":"141e4cb88b61ce456d7319fae495575d0906950c62e6c46fb927848a6dee34cd"} Jan 26 16:58:33 crc kubenswrapper[4865]: I0126 16:58:33.863967 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-k6gs8"] Jan 26 16:58:33 crc kubenswrapper[4865]: E0126 16:58:33.864389 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10a46546-e560-4ed8-8032-643d0a99fc09" containerName="pruner" Jan 26 16:58:33 crc kubenswrapper[4865]: I0126 16:58:33.864415 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="10a46546-e560-4ed8-8032-643d0a99fc09" containerName="pruner" Jan 26 16:58:33 crc kubenswrapper[4865]: I0126 16:58:33.864558 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="10a46546-e560-4ed8-8032-643d0a99fc09" containerName="pruner" Jan 26 16:58:33 crc kubenswrapper[4865]: I0126 16:58:33.865192 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-k6gs8" Jan 26 16:58:33 crc kubenswrapper[4865]: I0126 16:58:33.883452 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-k6gs8"] Jan 26 16:58:33 crc kubenswrapper[4865]: I0126 16:58:33.976769 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-npnhh"] Jan 26 16:58:34 crc kubenswrapper[4865]: I0126 16:58:34.080911 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/216aec98-a029-4ddc-ac94-2b3a02fa9ff9-installation-pull-secrets\") pod \"image-registry-66df7c8f76-k6gs8\" (UID: \"216aec98-a029-4ddc-ac94-2b3a02fa9ff9\") " pod="openshift-image-registry/image-registry-66df7c8f76-k6gs8" Jan 26 16:58:34 crc kubenswrapper[4865]: I0126 16:58:34.081064 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-k6gs8\" (UID: \"216aec98-a029-4ddc-ac94-2b3a02fa9ff9\") " pod="openshift-image-registry/image-registry-66df7c8f76-k6gs8" Jan 26 16:58:34 crc kubenswrapper[4865]: I0126 16:58:34.081103 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/216aec98-a029-4ddc-ac94-2b3a02fa9ff9-bound-sa-token\") pod \"image-registry-66df7c8f76-k6gs8\" (UID: \"216aec98-a029-4ddc-ac94-2b3a02fa9ff9\") " pod="openshift-image-registry/image-registry-66df7c8f76-k6gs8" Jan 26 16:58:34 crc kubenswrapper[4865]: I0126 16:58:34.081140 4865 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/216aec98-a029-4ddc-ac94-2b3a02fa9ff9-ca-trust-extracted\") pod \"image-registry-66df7c8f76-k6gs8\" (UID: \"216aec98-a029-4ddc-ac94-2b3a02fa9ff9\") " pod="openshift-image-registry/image-registry-66df7c8f76-k6gs8" Jan 26 16:58:34 crc kubenswrapper[4865]: I0126 16:58:34.081185 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/216aec98-a029-4ddc-ac94-2b3a02fa9ff9-trusted-ca\") pod \"image-registry-66df7c8f76-k6gs8\" (UID: \"216aec98-a029-4ddc-ac94-2b3a02fa9ff9\") " pod="openshift-image-registry/image-registry-66df7c8f76-k6gs8" Jan 26 16:58:34 crc kubenswrapper[4865]: I0126 16:58:34.081220 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/216aec98-a029-4ddc-ac94-2b3a02fa9ff9-registry-tls\") pod \"image-registry-66df7c8f76-k6gs8\" (UID: \"216aec98-a029-4ddc-ac94-2b3a02fa9ff9\") " pod="openshift-image-registry/image-registry-66df7c8f76-k6gs8" Jan 26 16:58:34 crc kubenswrapper[4865]: I0126 16:58:34.081247 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/216aec98-a029-4ddc-ac94-2b3a02fa9ff9-registry-certificates\") pod \"image-registry-66df7c8f76-k6gs8\" (UID: \"216aec98-a029-4ddc-ac94-2b3a02fa9ff9\") " pod="openshift-image-registry/image-registry-66df7c8f76-k6gs8" Jan 26 16:58:34 crc kubenswrapper[4865]: I0126 16:58:34.081298 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lwg7h\" (UniqueName: \"kubernetes.io/projected/216aec98-a029-4ddc-ac94-2b3a02fa9ff9-kube-api-access-lwg7h\") pod \"image-registry-66df7c8f76-k6gs8\" (UID: \"216aec98-a029-4ddc-ac94-2b3a02fa9ff9\") " pod="openshift-image-registry/image-registry-66df7c8f76-k6gs8" Jan 26 16:58:34 crc kubenswrapper[4865]: I0126 16:58:34.144723 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-k6gs8\" (UID: \"216aec98-a029-4ddc-ac94-2b3a02fa9ff9\") " pod="openshift-image-registry/image-registry-66df7c8f76-k6gs8" Jan 26 16:58:34 crc kubenswrapper[4865]: I0126 16:58:34.181924 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/216aec98-a029-4ddc-ac94-2b3a02fa9ff9-registry-tls\") pod \"image-registry-66df7c8f76-k6gs8\" (UID: \"216aec98-a029-4ddc-ac94-2b3a02fa9ff9\") " pod="openshift-image-registry/image-registry-66df7c8f76-k6gs8" Jan 26 16:58:34 crc kubenswrapper[4865]: I0126 16:58:34.182251 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/216aec98-a029-4ddc-ac94-2b3a02fa9ff9-registry-certificates\") pod \"image-registry-66df7c8f76-k6gs8\" (UID: \"216aec98-a029-4ddc-ac94-2b3a02fa9ff9\") " pod="openshift-image-registry/image-registry-66df7c8f76-k6gs8" Jan 26 16:58:34 crc kubenswrapper[4865]: I0126 16:58:34.182368 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lwg7h\" 
(UniqueName: \"kubernetes.io/projected/216aec98-a029-4ddc-ac94-2b3a02fa9ff9-kube-api-access-lwg7h\") pod \"image-registry-66df7c8f76-k6gs8\" (UID: \"216aec98-a029-4ddc-ac94-2b3a02fa9ff9\") " pod="openshift-image-registry/image-registry-66df7c8f76-k6gs8" Jan 26 16:58:34 crc kubenswrapper[4865]: I0126 16:58:34.182467 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/216aec98-a029-4ddc-ac94-2b3a02fa9ff9-installation-pull-secrets\") pod \"image-registry-66df7c8f76-k6gs8\" (UID: \"216aec98-a029-4ddc-ac94-2b3a02fa9ff9\") " pod="openshift-image-registry/image-registry-66df7c8f76-k6gs8" Jan 26 16:58:34 crc kubenswrapper[4865]: I0126 16:58:34.182572 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/216aec98-a029-4ddc-ac94-2b3a02fa9ff9-bound-sa-token\") pod \"image-registry-66df7c8f76-k6gs8\" (UID: \"216aec98-a029-4ddc-ac94-2b3a02fa9ff9\") " pod="openshift-image-registry/image-registry-66df7c8f76-k6gs8" Jan 26 16:58:34 crc kubenswrapper[4865]: I0126 16:58:34.182667 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/216aec98-a029-4ddc-ac94-2b3a02fa9ff9-ca-trust-extracted\") pod \"image-registry-66df7c8f76-k6gs8\" (UID: \"216aec98-a029-4ddc-ac94-2b3a02fa9ff9\") " pod="openshift-image-registry/image-registry-66df7c8f76-k6gs8" Jan 26 16:58:34 crc kubenswrapper[4865]: I0126 16:58:34.182769 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/216aec98-a029-4ddc-ac94-2b3a02fa9ff9-trusted-ca\") pod \"image-registry-66df7c8f76-k6gs8\" (UID: \"216aec98-a029-4ddc-ac94-2b3a02fa9ff9\") " pod="openshift-image-registry/image-registry-66df7c8f76-k6gs8" Jan 26 16:58:34 crc kubenswrapper[4865]: I0126 16:58:34.183871 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/216aec98-a029-4ddc-ac94-2b3a02fa9ff9-registry-certificates\") pod \"image-registry-66df7c8f76-k6gs8\" (UID: \"216aec98-a029-4ddc-ac94-2b3a02fa9ff9\") " pod="openshift-image-registry/image-registry-66df7c8f76-k6gs8" Jan 26 16:58:34 crc kubenswrapper[4865]: I0126 16:58:34.184043 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/216aec98-a029-4ddc-ac94-2b3a02fa9ff9-trusted-ca\") pod \"image-registry-66df7c8f76-k6gs8\" (UID: \"216aec98-a029-4ddc-ac94-2b3a02fa9ff9\") " pod="openshift-image-registry/image-registry-66df7c8f76-k6gs8" Jan 26 16:58:34 crc kubenswrapper[4865]: I0126 16:58:34.184186 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/216aec98-a029-4ddc-ac94-2b3a02fa9ff9-ca-trust-extracted\") pod \"image-registry-66df7c8f76-k6gs8\" (UID: \"216aec98-a029-4ddc-ac94-2b3a02fa9ff9\") " pod="openshift-image-registry/image-registry-66df7c8f76-k6gs8" Jan 26 16:58:34 crc kubenswrapper[4865]: I0126 16:58:34.197430 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/216aec98-a029-4ddc-ac94-2b3a02fa9ff9-registry-tls\") pod \"image-registry-66df7c8f76-k6gs8\" (UID: \"216aec98-a029-4ddc-ac94-2b3a02fa9ff9\") " pod="openshift-image-registry/image-registry-66df7c8f76-k6gs8" Jan 26 16:58:34 crc 
kubenswrapper[4865]: I0126 16:58:34.207231 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/216aec98-a029-4ddc-ac94-2b3a02fa9ff9-installation-pull-secrets\") pod \"image-registry-66df7c8f76-k6gs8\" (UID: \"216aec98-a029-4ddc-ac94-2b3a02fa9ff9\") " pod="openshift-image-registry/image-registry-66df7c8f76-k6gs8" Jan 26 16:58:34 crc kubenswrapper[4865]: I0126 16:58:34.222224 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lwg7h\" (UniqueName: \"kubernetes.io/projected/216aec98-a029-4ddc-ac94-2b3a02fa9ff9-kube-api-access-lwg7h\") pod \"image-registry-66df7c8f76-k6gs8\" (UID: \"216aec98-a029-4ddc-ac94-2b3a02fa9ff9\") " pod="openshift-image-registry/image-registry-66df7c8f76-k6gs8" Jan 26 16:58:34 crc kubenswrapper[4865]: I0126 16:58:34.226834 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/216aec98-a029-4ddc-ac94-2b3a02fa9ff9-bound-sa-token\") pod \"image-registry-66df7c8f76-k6gs8\" (UID: \"216aec98-a029-4ddc-ac94-2b3a02fa9ff9\") " pod="openshift-image-registry/image-registry-66df7c8f76-k6gs8" Jan 26 16:58:34 crc kubenswrapper[4865]: I0126 16:58:34.275065 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-7pntj" Jan 26 16:58:34 crc kubenswrapper[4865]: I0126 16:58:34.354189 4865 generic.go:334] "Generic (PLEG): container finished" podID="ea398908-abb3-4ce8-bbaf-a44b6350314d" containerID="4591f46793bd88eca8ded4e7a3162c6f5c33af4606451fa3f64fb6ee007727af" exitCode=0 Jan 26 16:58:34 crc kubenswrapper[4865]: I0126 16:58:34.354272 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hxg9k" event={"ID":"ea398908-abb3-4ce8-bbaf-a44b6350314d","Type":"ContainerDied","Data":"4591f46793bd88eca8ded4e7a3162c6f5c33af4606451fa3f64fb6ee007727af"} Jan 26 16:58:34 crc kubenswrapper[4865]: I0126 16:58:34.356138 4865 generic.go:334] "Generic (PLEG): container finished" podID="ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf" containerID="d17f97b8b34a236431f2df8eaf3aba2e41f97dc9b1512fa84044f3fd8da2d3b3" exitCode=0 Jan 26 16:58:34 crc kubenswrapper[4865]: I0126 16:58:34.356164 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fkmhm" event={"ID":"ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf","Type":"ContainerDied","Data":"d17f97b8b34a236431f2df8eaf3aba2e41f97dc9b1512fa84044f3fd8da2d3b3"} Jan 26 16:58:34 crc kubenswrapper[4865]: I0126 16:58:34.486460 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-k6gs8" Jan 26 16:58:35 crc kubenswrapper[4865]: I0126 16:58:35.365126 4865 generic.go:334] "Generic (PLEG): container finished" podID="e402709d-4ea5-403d-af16-3ab178841b35" containerID="1fdc1af6f42220ef6f9fe4dcab666d0787bb7069b7272e868ab095b1d0c393df" exitCode=0 Jan 26 16:58:35 crc kubenswrapper[4865]: I0126 16:58:35.365240 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pg69s" event={"ID":"e402709d-4ea5-403d-af16-3ab178841b35","Type":"ContainerDied","Data":"1fdc1af6f42220ef6f9fe4dcab666d0787bb7069b7272e868ab095b1d0c393df"} Jan 26 16:58:35 crc kubenswrapper[4865]: I0126 16:58:35.874176 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-gw6ld" Jan 26 16:58:35 crc kubenswrapper[4865]: I0126 16:58:35.874245 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-gw6ld" Jan 26 16:58:36 crc kubenswrapper[4865]: I0126 16:58:36.348078 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-gw6ld" Jan 26 16:58:36 crc kubenswrapper[4865]: I0126 16:58:36.412427 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-gw6ld" Jan 26 16:58:37 crc kubenswrapper[4865]: I0126 16:58:37.085288 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-k6gs8"] Jan 26 16:58:37 crc kubenswrapper[4865]: W0126 16:58:37.092972 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod216aec98_a029_4ddc_ac94_2b3a02fa9ff9.slice/crio-77d681efb42c809b8f8272c2b7887c34654b1f78ddad71ffa9c459687283dd60 WatchSource:0}: Error finding container 77d681efb42c809b8f8272c2b7887c34654b1f78ddad71ffa9c459687283dd60: Status 404 returned error can't find the container with id 77d681efb42c809b8f8272c2b7887c34654b1f78ddad71ffa9c459687283dd60 Jan 26 16:58:37 crc kubenswrapper[4865]: I0126 16:58:37.375665 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-k6gs8" event={"ID":"216aec98-a029-4ddc-ac94-2b3a02fa9ff9","Type":"ContainerStarted","Data":"77d681efb42c809b8f8272c2b7887c34654b1f78ddad71ffa9c459687283dd60"} Jan 26 16:58:40 crc kubenswrapper[4865]: I0126 16:58:40.407086 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r9jxm" event={"ID":"7337688e-9bf5-436c-be77-afad7fa093ed","Type":"ContainerStarted","Data":"d84edf2c216e585f647c74ee23cc9cf7d2e6450ac0a20757d2ffd2ff48519a17"} Jan 26 16:58:40 crc kubenswrapper[4865]: I0126 16:58:40.408862 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-k6gs8" event={"ID":"216aec98-a029-4ddc-ac94-2b3a02fa9ff9","Type":"ContainerStarted","Data":"bbe06471214ae4227023d99495c21e48114ff4da7ba98cffad40796c07296916"} Jan 26 16:58:40 crc kubenswrapper[4865]: I0126 16:58:40.410686 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d5vtl" event={"ID":"109ec94e-2b65-4cc6-a53c-3f874401fd5e","Type":"ContainerStarted","Data":"959905e099406c6e75d168053fe44fa35624ed5be1256a908fc63de2b0957066"} Jan 26 16:58:41 crc kubenswrapper[4865]: I0126 16:58:41.415888 4865 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-k6gs8" Jan 26 16:58:42 crc kubenswrapper[4865]: I0126 16:58:42.438551 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-k6gs8" podStartSLOduration=9.438529775 podStartE2EDuration="9.438529775s" podCreationTimestamp="2026-01-26 16:58:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:58:41.439888524 +0000 UTC m=+249.023774121" watchObservedRunningTime="2026-01-26 16:58:42.438529775 +0000 UTC m=+250.022415362" Jan 26 16:58:42 crc kubenswrapper[4865]: I0126 16:58:42.440924 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-r9jxm" podStartSLOduration=9.823111679 podStartE2EDuration="1m26.440913024s" podCreationTimestamp="2026-01-26 16:57:16 +0000 UTC" firstStartedPulling="2026-01-26 16:57:19.722130609 +0000 UTC m=+167.306016196" lastFinishedPulling="2026-01-26 16:58:36.339931954 +0000 UTC m=+243.923817541" observedRunningTime="2026-01-26 16:58:42.437366471 +0000 UTC m=+250.021252058" watchObservedRunningTime="2026-01-26 16:58:42.440913024 +0000 UTC m=+250.024798611" Jan 26 16:58:43 crc kubenswrapper[4865]: I0126 16:58:43.446144 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-d5vtl" podStartSLOduration=10.670618252 podStartE2EDuration="1m27.446036243s" podCreationTimestamp="2026-01-26 16:57:16 +0000 UTC" firstStartedPulling="2026-01-26 16:57:20.942942416 +0000 UTC m=+168.526828003" lastFinishedPulling="2026-01-26 16:58:37.718360377 +0000 UTC m=+245.302245994" observedRunningTime="2026-01-26 16:58:43.4438463 +0000 UTC m=+251.027731887" watchObservedRunningTime="2026-01-26 16:58:43.446036243 +0000 UTC m=+251.029921850" Jan 26 16:58:46 crc kubenswrapper[4865]: I0126 16:58:46.443484 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7x4jl" event={"ID":"28d973df-adc6-41b0-81b9-afd3a743641f","Type":"ContainerStarted","Data":"2f319958091fc0d9103daaf13741d642c5e7458be0a2050797d55e55e5f02b1e"} Jan 26 16:58:46 crc kubenswrapper[4865]: I0126 16:58:46.951581 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-r9jxm" Jan 26 16:58:46 crc kubenswrapper[4865]: I0126 16:58:46.952280 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-r9jxm" Jan 26 16:58:47 crc kubenswrapper[4865]: I0126 16:58:47.047723 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-r9jxm" Jan 26 16:58:47 crc kubenswrapper[4865]: I0126 16:58:47.159394 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-d5vtl" Jan 26 16:58:47 crc kubenswrapper[4865]: I0126 16:58:47.159902 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-d5vtl" Jan 26 16:58:47 crc kubenswrapper[4865]: I0126 16:58:47.227353 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-d5vtl" Jan 26 16:58:47 crc kubenswrapper[4865]: I0126 16:58:47.478976 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/community-operators-7x4jl" podStartSLOduration=6.901118674 podStartE2EDuration="1m32.47895239s" podCreationTimestamp="2026-01-26 16:57:15 +0000 UTC" firstStartedPulling="2026-01-26 16:57:19.718137075 +0000 UTC m=+167.302022652" lastFinishedPulling="2026-01-26 16:58:45.295970781 +0000 UTC m=+252.879856368" observedRunningTime="2026-01-26 16:58:47.478629191 +0000 UTC m=+255.062514768" watchObservedRunningTime="2026-01-26 16:58:47.47895239 +0000 UTC m=+255.062837977" Jan 26 16:58:47 crc kubenswrapper[4865]: I0126 16:58:47.502971 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-d5vtl" Jan 26 16:58:47 crc kubenswrapper[4865]: I0126 16:58:47.512952 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-r9jxm" Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.284095 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-d5vtl"] Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.460660 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-d5vtl" podUID="109ec94e-2b65-4cc6-a53c-3f874401fd5e" containerName="registry-server" containerID="cri-o://959905e099406c6e75d168053fe44fa35624ed5be1256a908fc63de2b0957066" gracePeriod=2 Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.812496 4865 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.812968 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06" gracePeriod=15 Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.813102 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769" gracePeriod=15 Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.813174 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c" gracePeriod=15 Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.813168 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1" gracePeriod=15 Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.813102 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://d5610d76387528c6d73e03566f9acf4b744b79f4a39119d35d23b8b2dcb385f6" gracePeriod=15 Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.814356 4865 
kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 26 16:58:49 crc kubenswrapper[4865]: E0126 16:58:49.814747 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.814765 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 26 16:58:49 crc kubenswrapper[4865]: E0126 16:58:49.814779 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.814788 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 26 16:58:49 crc kubenswrapper[4865]: E0126 16:58:49.814804 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.814813 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 26 16:58:49 crc kubenswrapper[4865]: E0126 16:58:49.814829 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.814836 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 26 16:58:49 crc kubenswrapper[4865]: E0126 16:58:49.814844 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.814852 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 26 16:58:49 crc kubenswrapper[4865]: E0126 16:58:49.814866 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.814875 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 26 16:58:49 crc kubenswrapper[4865]: E0126 16:58:49.814887 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.814898 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.815079 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.815093 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.815104 4865 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.815115 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.815131 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.815144 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.815152 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 26 16:58:49 crc kubenswrapper[4865]: E0126 16:58:49.815272 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.815282 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.816967 4865 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.818116 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.823518 4865 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="f4b27818a5e8e43d0dc095d08835c792" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.838802 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.838979 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.839100 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.839150 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: 
\"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.839392 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.839612 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.839876 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.839980 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.941204 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.941306 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.941355 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.941343 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.941431 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" 
(UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.941375 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.941479 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.941514 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.941508 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.941552 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.941575 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.941621 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.941547 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.941622 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" 
Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.941645 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 26 16:58:49 crc kubenswrapper[4865]: I0126 16:58:49.941587 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 26 16:58:50 crc kubenswrapper[4865]: I0126 16:58:50.473421 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log"
Jan 26 16:58:50 crc kubenswrapper[4865]: I0126 16:58:50.475768 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log"
Jan 26 16:58:50 crc kubenswrapper[4865]: I0126 16:58:50.476816 4865 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c" exitCode=2
Jan 26 16:58:52 crc kubenswrapper[4865]: I0126 16:58:52.494868 4865 generic.go:334] "Generic (PLEG): container finished" podID="2c6a6074-31d0-4bf7-a237-e9b4e2e17de7" containerID="ad966424308f8ef14dfdde2a400e3745ed182824a88985f95cf529e8be33fb24" exitCode=0
Jan 26 16:58:52 crc kubenswrapper[4865]: I0126 16:58:52.494948 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"2c6a6074-31d0-4bf7-a237-e9b4e2e17de7","Type":"ContainerDied","Data":"ad966424308f8ef14dfdde2a400e3745ed182824a88985f95cf529e8be33fb24"}
Jan 26 16:58:52 crc kubenswrapper[4865]: I0126 16:58:52.497958 4865 status_manager.go:851] "Failed to get status for pod" podUID="2c6a6074-31d0-4bf7-a237-e9b4e2e17de7" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:52 crc kubenswrapper[4865]: I0126 16:58:52.502033 4865 generic.go:334] "Generic (PLEG): container finished" podID="109ec94e-2b65-4cc6-a53c-3f874401fd5e" containerID="959905e099406c6e75d168053fe44fa35624ed5be1256a908fc63de2b0957066" exitCode=0
Jan 26 16:58:52 crc kubenswrapper[4865]: I0126 16:58:52.502043 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d5vtl" event={"ID":"109ec94e-2b65-4cc6-a53c-3f874401fd5e","Type":"ContainerDied","Data":"959905e099406c6e75d168053fe44fa35624ed5be1256a908fc63de2b0957066"}
Jan 26 16:58:52 crc kubenswrapper[4865]: I0126 16:58:52.508936 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log"
Jan 26 16:58:52 crc kubenswrapper[4865]: I0126 16:58:52.511423 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log"
Jan 26 16:58:52 crc kubenswrapper[4865]: I0126 16:58:52.512602 4865 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="d5610d76387528c6d73e03566f9acf4b744b79f4a39119d35d23b8b2dcb385f6" exitCode=0
Jan 26 16:58:52 crc kubenswrapper[4865]: I0126 16:58:52.512639 4865 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1" exitCode=0
Jan 26 16:58:52 crc kubenswrapper[4865]: I0126 16:58:52.512647 4865 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769" exitCode=0
Jan 26 16:58:52 crc kubenswrapper[4865]: I0126 16:58:52.512717 4865 scope.go:117] "RemoveContainer" containerID="6c1edd3e77b4ffac1714ebc5d607e249b2da809f853af4b2b5639551368bc604"
Jan 26 16:58:52 crc kubenswrapper[4865]: E0126 16:58:52.802685 4865 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:52 crc kubenswrapper[4865]: E0126 16:58:52.802930 4865 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:52 crc kubenswrapper[4865]: E0126 16:58:52.803279 4865 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:52 crc kubenswrapper[4865]: E0126 16:58:52.803890 4865 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:52 crc kubenswrapper[4865]: E0126 16:58:52.804337 4865 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:52 crc kubenswrapper[4865]: I0126 16:58:52.804444 4865 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease"
Jan 26 16:58:52 crc kubenswrapper[4865]: E0126 16:58:52.804795 4865 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" interval="200ms"
Jan 26 16:58:53 crc kubenswrapper[4865]: E0126 16:58:53.005815 4865 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" interval="400ms"
Jan 26 16:58:53 crc kubenswrapper[4865]: E0126 16:58:53.407505 4865 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" interval="800ms"
Jan 26 16:58:53 crc kubenswrapper[4865]: I0126 16:58:53.523485 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log"
Jan 26 16:58:53 crc kubenswrapper[4865]: I0126 16:58:53.525393 4865 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06" exitCode=0
Jan 26 16:58:53 crc kubenswrapper[4865]: I0126 16:58:53.829686 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc"
Jan 26 16:58:53 crc kubenswrapper[4865]: I0126 16:58:53.830629 4865 status_manager.go:851] "Failed to get status for pod" podUID="2c6a6074-31d0-4bf7-a237-e9b4e2e17de7" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:53 crc kubenswrapper[4865]: I0126 16:58:53.913982 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2c6a6074-31d0-4bf7-a237-e9b4e2e17de7-kube-api-access\") pod \"2c6a6074-31d0-4bf7-a237-e9b4e2e17de7\" (UID: \"2c6a6074-31d0-4bf7-a237-e9b4e2e17de7\") "
Jan 26 16:58:53 crc kubenswrapper[4865]: I0126 16:58:53.914154 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/2c6a6074-31d0-4bf7-a237-e9b4e2e17de7-var-lock\") pod \"2c6a6074-31d0-4bf7-a237-e9b4e2e17de7\" (UID: \"2c6a6074-31d0-4bf7-a237-e9b4e2e17de7\") "
Jan 26 16:58:53 crc kubenswrapper[4865]: I0126 16:58:53.914208 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2c6a6074-31d0-4bf7-a237-e9b4e2e17de7-kubelet-dir\") pod \"2c6a6074-31d0-4bf7-a237-e9b4e2e17de7\" (UID: \"2c6a6074-31d0-4bf7-a237-e9b4e2e17de7\") "
Jan 26 16:58:53 crc kubenswrapper[4865]: I0126 16:58:53.914294 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2c6a6074-31d0-4bf7-a237-e9b4e2e17de7-var-lock" (OuterVolumeSpecName: "var-lock") pod "2c6a6074-31d0-4bf7-a237-e9b4e2e17de7" (UID: "2c6a6074-31d0-4bf7-a237-e9b4e2e17de7"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 26 16:58:53 crc kubenswrapper[4865]: I0126 16:58:53.914387 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2c6a6074-31d0-4bf7-a237-e9b4e2e17de7-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "2c6a6074-31d0-4bf7-a237-e9b4e2e17de7" (UID: "2c6a6074-31d0-4bf7-a237-e9b4e2e17de7"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 26 16:58:53 crc kubenswrapper[4865]: I0126 16:58:53.914609 4865 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/2c6a6074-31d0-4bf7-a237-e9b4e2e17de7-var-lock\") on node \"crc\" DevicePath \"\""
Jan 26 16:58:53 crc kubenswrapper[4865]: I0126 16:58:53.914638 4865 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2c6a6074-31d0-4bf7-a237-e9b4e2e17de7-kubelet-dir\") on node \"crc\" DevicePath \"\""
Jan 26 16:58:53 crc kubenswrapper[4865]: I0126 16:58:53.922807 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c6a6074-31d0-4bf7-a237-e9b4e2e17de7-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "2c6a6074-31d0-4bf7-a237-e9b4e2e17de7" (UID: "2c6a6074-31d0-4bf7-a237-e9b4e2e17de7"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 16:58:54 crc kubenswrapper[4865]: I0126 16:58:54.016155 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2c6a6074-31d0-4bf7-a237-e9b4e2e17de7-kube-api-access\") on node \"crc\" DevicePath \"\""
Jan 26 16:58:54 crc kubenswrapper[4865]: E0126 16:58:54.208168 4865 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" interval="1.6s"
Jan 26 16:58:54 crc kubenswrapper[4865]: E0126 16:58:54.239590 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:58:54Z\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:58:54Z\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:58:54Z\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:58:54Z\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:024b1ed0676c2e11f6a319392c82e7acd0ceeae31ca00b202307c4d86a796b20\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:ada03173793960eaa0e4263282fcbf5af3dea8aaf2c3b0d864906108db062e8a\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1672061160},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:700a5f979fa4ef2b6f03177e68a780c3d93e2a6f429cdaa50e43997cf400e60c\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:ffe83dddbe52f5e67e1462a3d99eed5cbcb1385f1a99af0cb768e4968931dc8c\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1203425009},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:baf4eb931aab99ddd36e09d79f76ea1128c2ef536e95b78edb9af73175db2be3\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:dfb030ab67faacd3572a0cae805bd05f041ba6a589cf6fb289cb2295f364c580\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1183907051},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:169566a3a0bc4f9ca64256bb682df6ad4e2cfc5740b5338370c8202d43621680\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:5e18cee5ade3fc0cec09a5ee469d5840c7f50ec0cda6b90150394ad661ac5380\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1179648738},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\
\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792}]}}\" for node \"crc\": Patch \"https://api-int.crc.testing:6443/api/v1/nodes/crc/status?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:54 crc kubenswrapper[4865]: E0126 16:58:54.240985 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get 
\"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:54 crc kubenswrapper[4865]: E0126 16:58:54.241736 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:54 crc kubenswrapper[4865]: E0126 16:58:54.241980 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:54 crc kubenswrapper[4865]: E0126 16:58:54.242313 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:54 crc kubenswrapper[4865]: E0126 16:58:54.242353 4865 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 26 16:58:54 crc kubenswrapper[4865]: I0126 16:58:54.361223 4865 status_manager.go:851] "Failed to get status for pod" podUID="2c6a6074-31d0-4bf7-a237-e9b4e2e17de7" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:54 crc kubenswrapper[4865]: E0126 16:58:54.520031 4865 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/events\": dial tcp 38.102.83.224:6443: connect: connection refused" event="&Event{ObjectMeta:{redhat-operators-pg69s.188e566e1bfe9c0b openshift-marketplace 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-marketplace,Name:redhat-operators-pg69s,UID:e402709d-4ea5-403d-af16-3ab178841b35,APIVersion:v1,ResourceVersion:28669,FieldPath:spec.containers{registry-server},},Reason:Pulled,Message:Successfully pulled image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\" in 18.948s (18.948s including waiting). Image size: 907837715 bytes.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-26 16:58:54.519245835 +0000 UTC m=+262.103131422,LastTimestamp:2026-01-26 16:58:54.519245835 +0000 UTC m=+262.103131422,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 26 16:58:54 crc kubenswrapper[4865]: I0126 16:58:54.536230 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"2c6a6074-31d0-4bf7-a237-e9b4e2e17de7","Type":"ContainerDied","Data":"b9a139018896935aae0e2aa25d45d12a4a3f48cdd47d09192db80bce98cbddb1"} Jan 26 16:58:54 crc kubenswrapper[4865]: I0126 16:58:54.536343 4865 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 26 16:58:54 crc kubenswrapper[4865]: I0126 16:58:54.536353 4865 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b9a139018896935aae0e2aa25d45d12a4a3f48cdd47d09192db80bce98cbddb1" Jan 26 16:58:54 crc kubenswrapper[4865]: I0126 16:58:54.541379 4865 status_manager.go:851] "Failed to get status for pod" podUID="2c6a6074-31d0-4bf7-a237-e9b4e2e17de7" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:54 crc kubenswrapper[4865]: I0126 16:58:54.542890 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 26 16:58:54 crc kubenswrapper[4865]: I0126 16:58:54.543192 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 26 16:58:54 crc kubenswrapper[4865]: I0126 16:58:54.544464 4865 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="950dca6989842d4c78d16fcd49365b4bdb01d50cdfa2937d3d6dbdbaa3bbe372" Jan 26 16:58:54 crc kubenswrapper[4865]: I0126 16:58:54.544770 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 16:58:54 crc kubenswrapper[4865]: I0126 16:58:54.545714 4865 status_manager.go:851] "Failed to get status for pod" podUID="2c6a6074-31d0-4bf7-a237-e9b4e2e17de7" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:54 crc kubenswrapper[4865]: I0126 16:58:54.546042 4865 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:54 crc kubenswrapper[4865]: I0126 16:58:54.626433 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 26 16:58:54 crc kubenswrapper[4865]: I0126 16:58:54.626601 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 26 16:58:54 crc kubenswrapper[4865]: I0126 16:58:54.626665 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 26 16:58:54 crc kubenswrapper[4865]: I0126 16:58:54.626705 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" 
(OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 16:58:54 crc kubenswrapper[4865]: I0126 16:58:54.626714 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 16:58:54 crc kubenswrapper[4865]: I0126 16:58:54.626835 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 16:58:54 crc kubenswrapper[4865]: I0126 16:58:54.627117 4865 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 26 16:58:54 crc kubenswrapper[4865]: I0126 16:58:54.627145 4865 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Jan 26 16:58:54 crc kubenswrapper[4865]: I0126 16:58:54.627157 4865 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Jan 26 16:58:54 crc kubenswrapper[4865]: I0126 16:58:54.640175 4865 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d5vtl" Jan 26 16:58:54 crc kubenswrapper[4865]: I0126 16:58:54.640944 4865 status_manager.go:851] "Failed to get status for pod" podUID="2c6a6074-31d0-4bf7-a237-e9b4e2e17de7" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:54 crc kubenswrapper[4865]: I0126 16:58:54.641221 4865 status_manager.go:851] "Failed to get status for pod" podUID="109ec94e-2b65-4cc6-a53c-3f874401fd5e" pod="openshift-marketplace/redhat-marketplace-d5vtl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-d5vtl\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:54 crc kubenswrapper[4865]: I0126 16:58:54.641441 4865 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:54 crc kubenswrapper[4865]: I0126 16:58:54.727593 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/109ec94e-2b65-4cc6-a53c-3f874401fd5e-catalog-content\") pod \"109ec94e-2b65-4cc6-a53c-3f874401fd5e\" (UID: \"109ec94e-2b65-4cc6-a53c-3f874401fd5e\") " Jan 26 16:58:54 crc kubenswrapper[4865]: I0126 16:58:54.728262 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/109ec94e-2b65-4cc6-a53c-3f874401fd5e-utilities\") pod \"109ec94e-2b65-4cc6-a53c-3f874401fd5e\" (UID: \"109ec94e-2b65-4cc6-a53c-3f874401fd5e\") " Jan 26 16:58:54 crc kubenswrapper[4865]: I0126 16:58:54.728372 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r77gt\" (UniqueName: \"kubernetes.io/projected/109ec94e-2b65-4cc6-a53c-3f874401fd5e-kube-api-access-r77gt\") pod \"109ec94e-2b65-4cc6-a53c-3f874401fd5e\" (UID: \"109ec94e-2b65-4cc6-a53c-3f874401fd5e\") " Jan 26 16:58:54 crc kubenswrapper[4865]: I0126 16:58:54.729421 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/109ec94e-2b65-4cc6-a53c-3f874401fd5e-utilities" (OuterVolumeSpecName: "utilities") pod "109ec94e-2b65-4cc6-a53c-3f874401fd5e" (UID: "109ec94e-2b65-4cc6-a53c-3f874401fd5e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 16:58:54 crc kubenswrapper[4865]: I0126 16:58:54.736830 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/109ec94e-2b65-4cc6-a53c-3f874401fd5e-kube-api-access-r77gt" (OuterVolumeSpecName: "kube-api-access-r77gt") pod "109ec94e-2b65-4cc6-a53c-3f874401fd5e" (UID: "109ec94e-2b65-4cc6-a53c-3f874401fd5e"). InnerVolumeSpecName "kube-api-access-r77gt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:58:54 crc kubenswrapper[4865]: I0126 16:58:54.754720 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/109ec94e-2b65-4cc6-a53c-3f874401fd5e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "109ec94e-2b65-4cc6-a53c-3f874401fd5e" (UID: "109ec94e-2b65-4cc6-a53c-3f874401fd5e"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 16:58:54 crc kubenswrapper[4865]: I0126 16:58:54.830557 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r77gt\" (UniqueName: \"kubernetes.io/projected/109ec94e-2b65-4cc6-a53c-3f874401fd5e-kube-api-access-r77gt\") on node \"crc\" DevicePath \"\"" Jan 26 16:58:54 crc kubenswrapper[4865]: I0126 16:58:54.830602 4865 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/109ec94e-2b65-4cc6-a53c-3f874401fd5e-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 16:58:54 crc kubenswrapper[4865]: I0126 16:58:54.830615 4865 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/109ec94e-2b65-4cc6-a53c-3f874401fd5e-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 16:58:54 crc kubenswrapper[4865]: E0126 16:58:54.862559 4865 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.224:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 16:58:54 crc kubenswrapper[4865]: I0126 16:58:54.863712 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 16:58:54 crc kubenswrapper[4865]: W0126 16:58:54.889258 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-89a05b4150c599cefdf0ae5112fbe530420ded028860084e92d36c9c3d76e621 WatchSource:0}: Error finding container 89a05b4150c599cefdf0ae5112fbe530420ded028860084e92d36c9c3d76e621: Status 404 returned error can't find the container with id 89a05b4150c599cefdf0ae5112fbe530420ded028860084e92d36c9c3d76e621 Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.561433 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6vslt" event={"ID":"203f4245-9105-442f-a78b-dc354926516b","Type":"ContainerStarted","Data":"be165c1391dbc93939ad27f324aa6081cbd8614e3bff602b46159fa9f189bb76"} Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.563919 4865 status_manager.go:851] "Failed to get status for pod" podUID="203f4245-9105-442f-a78b-dc354926516b" pod="openshift-marketplace/community-operators-6vslt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6vslt\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.565405 4865 status_manager.go:851] "Failed to get status for pod" podUID="2c6a6074-31d0-4bf7-a237-e9b4e2e17de7" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.565830 4865 status_manager.go:851] "Failed to get status for pod" podUID="109ec94e-2b65-4cc6-a53c-3f874401fd5e" pod="openshift-marketplace/redhat-marketplace-d5vtl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-d5vtl\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.566298 4865 
status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.568174 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fkmhm" event={"ID":"ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf","Type":"ContainerStarted","Data":"1ac6269cf415b0490fe6323374b8d8fce3bee37d104fe02f4eef033f92e9dbdd"}
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.568761 4865 status_manager.go:851] "Failed to get status for pod" podUID="ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf" pod="openshift-marketplace/certified-operators-fkmhm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fkmhm\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.569042 4865 status_manager.go:851] "Failed to get status for pod" podUID="203f4245-9105-442f-a78b-dc354926516b" pod="openshift-marketplace/community-operators-6vslt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6vslt\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.569302 4865 status_manager.go:851] "Failed to get status for pod" podUID="2c6a6074-31d0-4bf7-a237-e9b4e2e17de7" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.569525 4865 status_manager.go:851] "Failed to get status for pod" podUID="109ec94e-2b65-4cc6-a53c-3f874401fd5e" pod="openshift-marketplace/redhat-marketplace-d5vtl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-d5vtl\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.569779 4865 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.571905 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"a0131cc4c4c0f4f31a777307e57e84744a9bc427c1965c4a18d074e4bca49b24"}
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.571952 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"89a05b4150c599cefdf0ae5112fbe530420ded028860084e92d36c9c3d76e621"}
Jan 26 16:58:55 crc kubenswrapper[4865]: E0126 16:58:55.572492 4865 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.224:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.572494 4865 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.572769 4865 status_manager.go:851] "Failed to get status for pod" podUID="ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf" pod="openshift-marketplace/certified-operators-fkmhm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fkmhm\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.573049 4865 status_manager.go:851] "Failed to get status for pod" podUID="203f4245-9105-442f-a78b-dc354926516b" pod="openshift-marketplace/community-operators-6vslt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6vslt\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.573321 4865 status_manager.go:851] "Failed to get status for pod" podUID="2c6a6074-31d0-4bf7-a237-e9b4e2e17de7" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.573586 4865 status_manager.go:851] "Failed to get status for pod" podUID="109ec94e-2b65-4cc6-a53c-3f874401fd5e" pod="openshift-marketplace/redhat-marketplace-d5vtl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-d5vtl\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.575549 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d5vtl" event={"ID":"109ec94e-2b65-4cc6-a53c-3f874401fd5e","Type":"ContainerDied","Data":"44c2cb7ac7262cd4ea685ef9e99a08e559573a39b02237f50af5a406aa759889"}
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.575567 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d5vtl"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.575599 4865 scope.go:117] "RemoveContainer" containerID="959905e099406c6e75d168053fe44fa35624ed5be1256a908fc63de2b0957066"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.577255 4865 status_manager.go:851] "Failed to get status for pod" podUID="ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf" pod="openshift-marketplace/certified-operators-fkmhm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fkmhm\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.579681 4865 status_manager.go:851] "Failed to get status for pod" podUID="203f4245-9105-442f-a78b-dc354926516b" pod="openshift-marketplace/community-operators-6vslt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6vslt\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.580153 4865 status_manager.go:851] "Failed to get status for pod" podUID="2c6a6074-31d0-4bf7-a237-e9b4e2e17de7" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.580767 4865 status_manager.go:851] "Failed to get status for pod" podUID="109ec94e-2b65-4cc6-a53c-3f874401fd5e" pod="openshift-marketplace/redhat-marketplace-d5vtl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-d5vtl\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.581006 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pg69s" event={"ID":"e402709d-4ea5-403d-af16-3ab178841b35","Type":"ContainerStarted","Data":"31f2dc7237b175955b2b9bdc7f325ece04182095f775ce4031678e45e333d8fe"}
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.581266 4865 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.581670 4865 status_manager.go:851] "Failed to get status for pod" podUID="2c6a6074-31d0-4bf7-a237-e9b4e2e17de7" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.581931 4865 status_manager.go:851] "Failed to get status for pod" podUID="109ec94e-2b65-4cc6-a53c-3f874401fd5e" pod="openshift-marketplace/redhat-marketplace-d5vtl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-d5vtl\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.582320 4865 status_manager.go:851] "Failed to get status for pod" podUID="e402709d-4ea5-403d-af16-3ab178841b35" pod="openshift-marketplace/redhat-operators-pg69s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-pg69s\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.582711 4865 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.583081 4865 status_manager.go:851] "Failed to get status for pod" podUID="ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf" pod="openshift-marketplace/certified-operators-fkmhm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fkmhm\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.583360 4865 status_manager.go:851] "Failed to get status for pod" podUID="203f4245-9105-442f-a78b-dc354926516b" pod="openshift-marketplace/community-operators-6vslt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6vslt\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.595258 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hxg9k" event={"ID":"ea398908-abb3-4ce8-bbaf-a44b6350314d","Type":"ContainerStarted","Data":"e5a20880dba9ecd775bb5301de21ed37c5f3d94a131e9cc629e031387ca9ca47"}
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.596430 4865 scope.go:117] "RemoveContainer" containerID="923a5ed4618c80f10c1343d9559301b15c16773d29a39c2e3cc3360fb420c9c9"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.597102 4865 status_manager.go:851] "Failed to get status for pod" podUID="ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf" pod="openshift-marketplace/certified-operators-fkmhm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fkmhm\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.598258 4865 status_manager.go:851] "Failed to get status for pod" podUID="203f4245-9105-442f-a78b-dc354926516b" pod="openshift-marketplace/community-operators-6vslt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6vslt\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.598511 4865 status_manager.go:851] "Failed to get status for pod" podUID="2c6a6074-31d0-4bf7-a237-e9b4e2e17de7" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.598834 4865 status_manager.go:851] "Failed to get status for pod" podUID="109ec94e-2b65-4cc6-a53c-3f874401fd5e" pod="openshift-marketplace/redhat-marketplace-d5vtl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-d5vtl\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.599402 4865 status_manager.go:851] "Failed to get status for pod" podUID="ea398908-abb3-4ce8-bbaf-a44b6350314d" pod="openshift-marketplace/redhat-operators-hxg9k" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hxg9k\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.599864 4865 status_manager.go:851] "Failed to get status for pod" podUID="e402709d-4ea5-403d-af16-3ab178841b35" pod="openshift-marketplace/redhat-operators-pg69s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-pg69s\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.600184 4865 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.600883 4865 status_manager.go:851] "Failed to get status for pod" podUID="109ec94e-2b65-4cc6-a53c-3f874401fd5e" pod="openshift-marketplace/redhat-marketplace-d5vtl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-d5vtl\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.601307 4865 status_manager.go:851] "Failed to get status for pod" podUID="ea398908-abb3-4ce8-bbaf-a44b6350314d" pod="openshift-marketplace/redhat-operators-hxg9k" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hxg9k\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.601887 4865 status_manager.go:851] "Failed to get status for pod" podUID="e402709d-4ea5-403d-af16-3ab178841b35" pod="openshift-marketplace/redhat-operators-pg69s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-pg69s\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.603064 4865 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.603447 4865 status_manager.go:851] "Failed to get status for pod" podUID="ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf" pod="openshift-marketplace/certified-operators-fkmhm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fkmhm\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.603802 4865 status_manager.go:851] "Failed to get status for pod" podUID="203f4245-9105-442f-a78b-dc354926516b" pod="openshift-marketplace/community-operators-6vslt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6vslt\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.604240 4865 status_manager.go:851] "Failed to get status for pod" podUID="2c6a6074-31d0-4bf7-a237-e9b4e2e17de7" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.605505 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.606882 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.611337 4865 status_manager.go:851] "Failed to get status for pod" podUID="2c6a6074-31d0-4bf7-a237-e9b4e2e17de7" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.614921 4865 status_manager.go:851] "Failed to get status for pod" podUID="ea398908-abb3-4ce8-bbaf-a44b6350314d" pod="openshift-marketplace/redhat-operators-hxg9k" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hxg9k\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.615344 4865 status_manager.go:851] "Failed to get status for pod" podUID="109ec94e-2b65-4cc6-a53c-3f874401fd5e" pod="openshift-marketplace/redhat-marketplace-d5vtl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-d5vtl\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.615741 4865 status_manager.go:851] "Failed to get status for pod" podUID="e402709d-4ea5-403d-af16-3ab178841b35" pod="openshift-marketplace/redhat-operators-pg69s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-pg69s\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.616109 4865 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.616405 4865 status_manager.go:851] "Failed to get status for pod" podUID="ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf" pod="openshift-marketplace/certified-operators-fkmhm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fkmhm\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.616748 4865 status_manager.go:851] "Failed to get status for pod" podUID="203f4245-9105-442f-a78b-dc354926516b" pod="openshift-marketplace/community-operators-6vslt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6vslt\": dial tcp 38.102.83.224:6443: connect: connection refused"
Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.627837 4865 status_manager.go:851] "Failed to get status
for pod" podUID="203f4245-9105-442f-a78b-dc354926516b" pod="openshift-marketplace/community-operators-6vslt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6vslt\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.628245 4865 status_manager.go:851] "Failed to get status for pod" podUID="ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf" pod="openshift-marketplace/certified-operators-fkmhm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fkmhm\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.628766 4865 status_manager.go:851] "Failed to get status for pod" podUID="2c6a6074-31d0-4bf7-a237-e9b4e2e17de7" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.629119 4865 status_manager.go:851] "Failed to get status for pod" podUID="109ec94e-2b65-4cc6-a53c-3f874401fd5e" pod="openshift-marketplace/redhat-marketplace-d5vtl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-d5vtl\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.629430 4865 status_manager.go:851] "Failed to get status for pod" podUID="ea398908-abb3-4ce8-bbaf-a44b6350314d" pod="openshift-marketplace/redhat-operators-hxg9k" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hxg9k\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.629720 4865 status_manager.go:851] "Failed to get status for pod" podUID="e402709d-4ea5-403d-af16-3ab178841b35" pod="openshift-marketplace/redhat-operators-pg69s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-pg69s\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.630148 4865 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.645301 4865 scope.go:117] "RemoveContainer" containerID="dcdd8eefb507efdbe1e407f939c28a25389d558d748ce86a1a08840d33c5d563" Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.803711 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-fkmhm" Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.803791 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-fkmhm" Jan 26 16:58:55 crc kubenswrapper[4865]: E0126 16:58:55.809121 4865 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" interval="3.2s" Jan 26 16:58:55 crc 
kubenswrapper[4865]: I0126 16:58:55.836483 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-6vslt" Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.836814 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-6vslt" Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.840624 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-7x4jl" Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.842913 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-7x4jl" Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.900496 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-7x4jl" Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.901149 4865 status_manager.go:851] "Failed to get status for pod" podUID="ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf" pod="openshift-marketplace/certified-operators-fkmhm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fkmhm\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.901873 4865 status_manager.go:851] "Failed to get status for pod" podUID="203f4245-9105-442f-a78b-dc354926516b" pod="openshift-marketplace/community-operators-6vslt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6vslt\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.902145 4865 status_manager.go:851] "Failed to get status for pod" podUID="2c6a6074-31d0-4bf7-a237-e9b4e2e17de7" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.902416 4865 status_manager.go:851] "Failed to get status for pod" podUID="109ec94e-2b65-4cc6-a53c-3f874401fd5e" pod="openshift-marketplace/redhat-marketplace-d5vtl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-d5vtl\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.902652 4865 status_manager.go:851] "Failed to get status for pod" podUID="ea398908-abb3-4ce8-bbaf-a44b6350314d" pod="openshift-marketplace/redhat-operators-hxg9k" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hxg9k\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.902897 4865 status_manager.go:851] "Failed to get status for pod" podUID="e402709d-4ea5-403d-af16-3ab178841b35" pod="openshift-marketplace/redhat-operators-pg69s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-pg69s\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.903082 4865 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:55 crc kubenswrapper[4865]: I0126 16:58:55.903247 4865 status_manager.go:851] "Failed to get status for pod" podUID="28d973df-adc6-41b0-81b9-afd3a743641f" pod="openshift-marketplace/community-operators-7x4jl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-7x4jl\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:56 crc kubenswrapper[4865]: I0126 16:58:56.365576 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Jan 26 16:58:56 crc kubenswrapper[4865]: I0126 16:58:56.669405 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-7x4jl" Jan 26 16:58:56 crc kubenswrapper[4865]: I0126 16:58:56.670344 4865 status_manager.go:851] "Failed to get status for pod" podUID="ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf" pod="openshift-marketplace/certified-operators-fkmhm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fkmhm\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:56 crc kubenswrapper[4865]: I0126 16:58:56.672061 4865 status_manager.go:851] "Failed to get status for pod" podUID="203f4245-9105-442f-a78b-dc354926516b" pod="openshift-marketplace/community-operators-6vslt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6vslt\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:56 crc kubenswrapper[4865]: I0126 16:58:56.672443 4865 status_manager.go:851] "Failed to get status for pod" podUID="2c6a6074-31d0-4bf7-a237-e9b4e2e17de7" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:56 crc kubenswrapper[4865]: I0126 16:58:56.673217 4865 status_manager.go:851] "Failed to get status for pod" podUID="109ec94e-2b65-4cc6-a53c-3f874401fd5e" pod="openshift-marketplace/redhat-marketplace-d5vtl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-d5vtl\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:56 crc kubenswrapper[4865]: I0126 16:58:56.673620 4865 status_manager.go:851] "Failed to get status for pod" podUID="ea398908-abb3-4ce8-bbaf-a44b6350314d" pod="openshift-marketplace/redhat-operators-hxg9k" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hxg9k\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:56 crc kubenswrapper[4865]: I0126 16:58:56.673940 4865 status_manager.go:851] "Failed to get status for pod" podUID="e402709d-4ea5-403d-af16-3ab178841b35" pod="openshift-marketplace/redhat-operators-pg69s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-pg69s\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:56 crc kubenswrapper[4865]: I0126 16:58:56.674278 4865 status_manager.go:851] "Failed to get status for pod" podUID="28d973df-adc6-41b0-81b9-afd3a743641f" 
pod="openshift-marketplace/community-operators-7x4jl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-7x4jl\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:56 crc kubenswrapper[4865]: I0126 16:58:56.852428 4865 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-fkmhm" podUID="ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf" containerName="registry-server" probeResult="failure" output=< Jan 26 16:58:56 crc kubenswrapper[4865]: timeout: failed to connect service ":50051" within 1s Jan 26 16:58:56 crc kubenswrapper[4865]: > Jan 26 16:58:56 crc kubenswrapper[4865]: I0126 16:58:56.884477 4865 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-6vslt" podUID="203f4245-9105-442f-a78b-dc354926516b" containerName="registry-server" probeResult="failure" output=< Jan 26 16:58:56 crc kubenswrapper[4865]: timeout: failed to connect service ":50051" within 1s Jan 26 16:58:56 crc kubenswrapper[4865]: > Jan 26 16:58:57 crc kubenswrapper[4865]: E0126 16:58:57.305313 4865 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/events\": dial tcp 38.102.83.224:6443: connect: connection refused" event="&Event{ObjectMeta:{redhat-operators-pg69s.188e566e1bfe9c0b openshift-marketplace 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-marketplace,Name:redhat-operators-pg69s,UID:e402709d-4ea5-403d-af16-3ab178841b35,APIVersion:v1,ResourceVersion:28669,FieldPath:spec.containers{registry-server},},Reason:Pulled,Message:Successfully pulled image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\" in 18.948s (18.948s including waiting). 
Image size: 907837715 bytes.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-26 16:58:54.519245835 +0000 UTC m=+262.103131422,LastTimestamp:2026-01-26 16:58:54.519245835 +0000 UTC m=+262.103131422,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 26 16:58:58 crc kubenswrapper[4865]: I0126 16:58:58.254715 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-hxg9k" Jan 26 16:58:58 crc kubenswrapper[4865]: I0126 16:58:58.256188 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-hxg9k" Jan 26 16:58:58 crc kubenswrapper[4865]: I0126 16:58:58.653239 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-pg69s" Jan 26 16:58:58 crc kubenswrapper[4865]: I0126 16:58:58.655760 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-pg69s" Jan 26 16:58:59 crc kubenswrapper[4865]: E0126 16:58:59.010323 4865 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" interval="6.4s" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.094238 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" podUID="5d8bdd7f-0bfa-41ae-b684-7d96b15b7043" containerName="oauth-openshift" containerID="cri-o://5130cdd912347bde8e676845c6c867951a12ea8f74795f801b8429ffbb07e3c8" gracePeriod=15 Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.307481 4865 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-hxg9k" podUID="ea398908-abb3-4ce8-bbaf-a44b6350314d" containerName="registry-server" probeResult="failure" output=< Jan 26 16:58:59 crc kubenswrapper[4865]: timeout: failed to connect service ":50051" within 1s Jan 26 16:58:59 crc kubenswrapper[4865]: > Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.574591 4865 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.575448 4865 status_manager.go:851] "Failed to get status for pod" podUID="28d973df-adc6-41b0-81b9-afd3a743641f" pod="openshift-marketplace/community-operators-7x4jl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-7x4jl\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.575712 4865 status_manager.go:851] "Failed to get status for pod" podUID="ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf" pod="openshift-marketplace/certified-operators-fkmhm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fkmhm\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.575969 4865 status_manager.go:851] "Failed to get status for pod" podUID="203f4245-9105-442f-a78b-dc354926516b" pod="openshift-marketplace/community-operators-6vslt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6vslt\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.576368 4865 status_manager.go:851] "Failed to get status for pod" podUID="2c6a6074-31d0-4bf7-a237-e9b4e2e17de7" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.577904 4865 status_manager.go:851] "Failed to get status for pod" podUID="109ec94e-2b65-4cc6-a53c-3f874401fd5e" pod="openshift-marketplace/redhat-marketplace-d5vtl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-d5vtl\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.578200 4865 status_manager.go:851] "Failed to get status for pod" podUID="ea398908-abb3-4ce8-bbaf-a44b6350314d" pod="openshift-marketplace/redhat-operators-hxg9k" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hxg9k\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.578527 4865 status_manager.go:851] "Failed to get status for pod" podUID="e402709d-4ea5-403d-af16-3ab178841b35" pod="openshift-marketplace/redhat-operators-pg69s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-pg69s\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.578779 4865 status_manager.go:851] "Failed to get status for pod" podUID="5d8bdd7f-0bfa-41ae-b684-7d96b15b7043" pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-npnhh\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.676948 4865 generic.go:334] "Generic (PLEG): container finished" podID="5d8bdd7f-0bfa-41ae-b684-7d96b15b7043" containerID="5130cdd912347bde8e676845c6c867951a12ea8f74795f801b8429ffbb07e3c8" exitCode=0 Jan 26 16:58:59 crc 
kubenswrapper[4865]: I0126 16:58:59.677246 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.677279 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" event={"ID":"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043","Type":"ContainerDied","Data":"5130cdd912347bde8e676845c6c867951a12ea8f74795f801b8429ffbb07e3c8"} Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.678671 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" event={"ID":"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043","Type":"ContainerDied","Data":"67c87b6a183e6e1197b97ad8bb09cf465ce4f792818320f3a2e61465af857f13"} Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.678702 4865 scope.go:117] "RemoveContainer" containerID="5130cdd912347bde8e676845c6c867951a12ea8f74795f801b8429ffbb07e3c8" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.679101 4865 status_manager.go:851] "Failed to get status for pod" podUID="e402709d-4ea5-403d-af16-3ab178841b35" pod="openshift-marketplace/redhat-operators-pg69s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-pg69s\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.680389 4865 status_manager.go:851] "Failed to get status for pod" podUID="5d8bdd7f-0bfa-41ae-b684-7d96b15b7043" pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-npnhh\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.680919 4865 status_manager.go:851] "Failed to get status for pod" podUID="28d973df-adc6-41b0-81b9-afd3a743641f" pod="openshift-marketplace/community-operators-7x4jl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-7x4jl\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.681298 4865 status_manager.go:851] "Failed to get status for pod" podUID="ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf" pod="openshift-marketplace/certified-operators-fkmhm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fkmhm\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.681511 4865 status_manager.go:851] "Failed to get status for pod" podUID="203f4245-9105-442f-a78b-dc354926516b" pod="openshift-marketplace/community-operators-6vslt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6vslt\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.681710 4865 status_manager.go:851] "Failed to get status for pod" podUID="2c6a6074-31d0-4bf7-a237-e9b4e2e17de7" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.681893 4865 status_manager.go:851] "Failed to get status for pod" 
podUID="ea398908-abb3-4ce8-bbaf-a44b6350314d" pod="openshift-marketplace/redhat-operators-hxg9k" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hxg9k\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.682073 4865 status_manager.go:851] "Failed to get status for pod" podUID="109ec94e-2b65-4cc6-a53c-3f874401fd5e" pod="openshift-marketplace/redhat-marketplace-d5vtl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-d5vtl\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.706596 4865 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-pg69s" podUID="e402709d-4ea5-403d-af16-3ab178841b35" containerName="registry-server" probeResult="failure" output=< Jan 26 16:58:59 crc kubenswrapper[4865]: timeout: failed to connect service ":50051" within 1s Jan 26 16:58:59 crc kubenswrapper[4865]: > Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.710679 4865 scope.go:117] "RemoveContainer" containerID="5130cdd912347bde8e676845c6c867951a12ea8f74795f801b8429ffbb07e3c8" Jan 26 16:58:59 crc kubenswrapper[4865]: E0126 16:58:59.711559 4865 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5130cdd912347bde8e676845c6c867951a12ea8f74795f801b8429ffbb07e3c8\": container with ID starting with 5130cdd912347bde8e676845c6c867951a12ea8f74795f801b8429ffbb07e3c8 not found: ID does not exist" containerID="5130cdd912347bde8e676845c6c867951a12ea8f74795f801b8429ffbb07e3c8" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.711622 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5130cdd912347bde8e676845c6c867951a12ea8f74795f801b8429ffbb07e3c8"} err="failed to get container status \"5130cdd912347bde8e676845c6c867951a12ea8f74795f801b8429ffbb07e3c8\": rpc error: code = NotFound desc = could not find container \"5130cdd912347bde8e676845c6c867951a12ea8f74795f801b8429ffbb07e3c8\": container with ID starting with 5130cdd912347bde8e676845c6c867951a12ea8f74795f801b8429ffbb07e3c8 not found: ID does not exist" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.723743 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-user-template-provider-selection\") pod \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.723820 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-user-idp-0-file-data\") pod \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.723876 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-system-service-ca\") pod \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 
16:58:59.723912 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-audit-policies\") pod \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.723943 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lskq5\" (UniqueName: \"kubernetes.io/projected/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-kube-api-access-lskq5\") pod \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.723971 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-system-ocp-branding-template\") pod \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.724036 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-audit-dir\") pod \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.724110 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-system-cliconfig\") pod \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.724167 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-user-template-login\") pod \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.724204 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-system-router-certs\") pod \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.724297 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-system-trusted-ca-bundle\") pod \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.724337 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-user-template-error\") pod \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.724371 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-system-serving-cert\") pod \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.724403 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-system-session\") pod \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\" (UID: \"5d8bdd7f-0bfa-41ae-b684-7d96b15b7043\") " Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.725074 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "5d8bdd7f-0bfa-41ae-b684-7d96b15b7043" (UID: "5d8bdd7f-0bfa-41ae-b684-7d96b15b7043"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.725145 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "5d8bdd7f-0bfa-41ae-b684-7d96b15b7043" (UID: "5d8bdd7f-0bfa-41ae-b684-7d96b15b7043"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.725236 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "5d8bdd7f-0bfa-41ae-b684-7d96b15b7043" (UID: "5d8bdd7f-0bfa-41ae-b684-7d96b15b7043"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.726453 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "5d8bdd7f-0bfa-41ae-b684-7d96b15b7043" (UID: "5d8bdd7f-0bfa-41ae-b684-7d96b15b7043"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.726486 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "5d8bdd7f-0bfa-41ae-b684-7d96b15b7043" (UID: "5d8bdd7f-0bfa-41ae-b684-7d96b15b7043"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.747756 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "5d8bdd7f-0bfa-41ae-b684-7d96b15b7043" (UID: "5d8bdd7f-0bfa-41ae-b684-7d96b15b7043"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.748681 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "5d8bdd7f-0bfa-41ae-b684-7d96b15b7043" (UID: "5d8bdd7f-0bfa-41ae-b684-7d96b15b7043"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.749197 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "5d8bdd7f-0bfa-41ae-b684-7d96b15b7043" (UID: "5d8bdd7f-0bfa-41ae-b684-7d96b15b7043"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.749627 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "5d8bdd7f-0bfa-41ae-b684-7d96b15b7043" (UID: "5d8bdd7f-0bfa-41ae-b684-7d96b15b7043"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.750371 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-kube-api-access-lskq5" (OuterVolumeSpecName: "kube-api-access-lskq5") pod "5d8bdd7f-0bfa-41ae-b684-7d96b15b7043" (UID: "5d8bdd7f-0bfa-41ae-b684-7d96b15b7043"). InnerVolumeSpecName "kube-api-access-lskq5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.751465 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "5d8bdd7f-0bfa-41ae-b684-7d96b15b7043" (UID: "5d8bdd7f-0bfa-41ae-b684-7d96b15b7043"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.751530 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "5d8bdd7f-0bfa-41ae-b684-7d96b15b7043" (UID: "5d8bdd7f-0bfa-41ae-b684-7d96b15b7043"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.751760 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "5d8bdd7f-0bfa-41ae-b684-7d96b15b7043" (UID: "5d8bdd7f-0bfa-41ae-b684-7d96b15b7043"). InnerVolumeSpecName "v4-0-config-user-template-login". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.751959 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "5d8bdd7f-0bfa-41ae-b684-7d96b15b7043" (UID: "5d8bdd7f-0bfa-41ae-b684-7d96b15b7043"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.826619 4865 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.826672 4865 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.826687 4865 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.826704 4865 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.826719 4865 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.826731 4865 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.826742 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lskq5\" (UniqueName: \"kubernetes.io/projected/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-kube-api-access-lskq5\") on node \"crc\" DevicePath \"\"" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.826755 4865 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.826779 4865 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-audit-dir\") on node \"crc\" DevicePath \"\"" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.826797 4865 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.826811 4865 
reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.826823 4865 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.826842 4865 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.826855 4865 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.994394 4865 status_manager.go:851] "Failed to get status for pod" podUID="2c6a6074-31d0-4bf7-a237-e9b4e2e17de7" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.995084 4865 status_manager.go:851] "Failed to get status for pod" podUID="109ec94e-2b65-4cc6-a53c-3f874401fd5e" pod="openshift-marketplace/redhat-marketplace-d5vtl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-d5vtl\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.995443 4865 status_manager.go:851] "Failed to get status for pod" podUID="ea398908-abb3-4ce8-bbaf-a44b6350314d" pod="openshift-marketplace/redhat-operators-hxg9k" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hxg9k\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.995755 4865 status_manager.go:851] "Failed to get status for pod" podUID="e402709d-4ea5-403d-af16-3ab178841b35" pod="openshift-marketplace/redhat-operators-pg69s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-pg69s\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.995984 4865 status_manager.go:851] "Failed to get status for pod" podUID="5d8bdd7f-0bfa-41ae-b684-7d96b15b7043" pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-npnhh\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.996466 4865 status_manager.go:851] "Failed to get status for pod" podUID="28d973df-adc6-41b0-81b9-afd3a743641f" pod="openshift-marketplace/community-operators-7x4jl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-7x4jl\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:59 crc 
kubenswrapper[4865]: I0126 16:58:59.997084 4865 status_manager.go:851] "Failed to get status for pod" podUID="ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf" pod="openshift-marketplace/certified-operators-fkmhm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fkmhm\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:58:59 crc kubenswrapper[4865]: I0126 16:58:59.997418 4865 status_manager.go:851] "Failed to get status for pod" podUID="203f4245-9105-442f-a78b-dc354926516b" pod="openshift-marketplace/community-operators-6vslt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6vslt\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:00 crc kubenswrapper[4865]: E0126 16:59:00.436914 4865 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 38.102.83.224:6443: connect: connection refused" pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" volumeName="registry-storage" Jan 26 16:59:04 crc kubenswrapper[4865]: I0126 16:59:04.358165 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 16:59:04 crc kubenswrapper[4865]: I0126 16:59:04.361274 4865 status_manager.go:851] "Failed to get status for pod" podUID="ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf" pod="openshift-marketplace/certified-operators-fkmhm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fkmhm\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:04 crc kubenswrapper[4865]: I0126 16:59:04.361569 4865 status_manager.go:851] "Failed to get status for pod" podUID="203f4245-9105-442f-a78b-dc354926516b" pod="openshift-marketplace/community-operators-6vslt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6vslt\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:04 crc kubenswrapper[4865]: I0126 16:59:04.361766 4865 status_manager.go:851] "Failed to get status for pod" podUID="2c6a6074-31d0-4bf7-a237-e9b4e2e17de7" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:04 crc kubenswrapper[4865]: I0126 16:59:04.362137 4865 status_manager.go:851] "Failed to get status for pod" podUID="109ec94e-2b65-4cc6-a53c-3f874401fd5e" pod="openshift-marketplace/redhat-marketplace-d5vtl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-d5vtl\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:04 crc kubenswrapper[4865]: I0126 16:59:04.362574 4865 status_manager.go:851] "Failed to get status for pod" podUID="ea398908-abb3-4ce8-bbaf-a44b6350314d" pod="openshift-marketplace/redhat-operators-hxg9k" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hxg9k\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:04 crc kubenswrapper[4865]: I0126 16:59:04.362830 4865 
status_manager.go:851] "Failed to get status for pod" podUID="e402709d-4ea5-403d-af16-3ab178841b35" pod="openshift-marketplace/redhat-operators-pg69s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-pg69s\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:04 crc kubenswrapper[4865]: I0126 16:59:04.363063 4865 status_manager.go:851] "Failed to get status for pod" podUID="5d8bdd7f-0bfa-41ae-b684-7d96b15b7043" pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-npnhh\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:04 crc kubenswrapper[4865]: I0126 16:59:04.363276 4865 status_manager.go:851] "Failed to get status for pod" podUID="28d973df-adc6-41b0-81b9-afd3a743641f" pod="openshift-marketplace/community-operators-7x4jl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-7x4jl\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:04 crc kubenswrapper[4865]: I0126 16:59:04.364287 4865 status_manager.go:851] "Failed to get status for pod" podUID="ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf" pod="openshift-marketplace/certified-operators-fkmhm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fkmhm\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:04 crc kubenswrapper[4865]: I0126 16:59:04.365200 4865 status_manager.go:851] "Failed to get status for pod" podUID="203f4245-9105-442f-a78b-dc354926516b" pod="openshift-marketplace/community-operators-6vslt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6vslt\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:04 crc kubenswrapper[4865]: I0126 16:59:04.365622 4865 status_manager.go:851] "Failed to get status for pod" podUID="2c6a6074-31d0-4bf7-a237-e9b4e2e17de7" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:04 crc kubenswrapper[4865]: I0126 16:59:04.366057 4865 status_manager.go:851] "Failed to get status for pod" podUID="ea398908-abb3-4ce8-bbaf-a44b6350314d" pod="openshift-marketplace/redhat-operators-hxg9k" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hxg9k\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:04 crc kubenswrapper[4865]: I0126 16:59:04.366396 4865 status_manager.go:851] "Failed to get status for pod" podUID="109ec94e-2b65-4cc6-a53c-3f874401fd5e" pod="openshift-marketplace/redhat-marketplace-d5vtl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-d5vtl\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:04 crc kubenswrapper[4865]: I0126 16:59:04.366781 4865 status_manager.go:851] "Failed to get status for pod" podUID="e402709d-4ea5-403d-af16-3ab178841b35" pod="openshift-marketplace/redhat-operators-pg69s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-pg69s\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:04 crc kubenswrapper[4865]: I0126 
16:59:04.367085 4865 status_manager.go:851] "Failed to get status for pod" podUID="5d8bdd7f-0bfa-41ae-b684-7d96b15b7043" pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-npnhh\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:04 crc kubenswrapper[4865]: I0126 16:59:04.367421 4865 status_manager.go:851] "Failed to get status for pod" podUID="28d973df-adc6-41b0-81b9-afd3a743641f" pod="openshift-marketplace/community-operators-7x4jl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-7x4jl\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:04 crc kubenswrapper[4865]: E0126 16:59:04.373694 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:59:04Z\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:59:04Z\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:59:04Z\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T16:59:04Z\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:024b1ed0676c2e11f6a319392c82e7acd0ceeae31ca00b202307c4d86a796b20\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:ada03173793960eaa0e4263282fcbf5af3dea8aaf2c3b0d864906108db062e8a\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1672061160},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:700a5f979fa4ef2b6f03177e68a780c3d93e2a6f429cdaa50e43997cf400e60c\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:ffe83dddbe52f5e67e1462a3d99eed5cbcb1385f1a99af0cb768e4968931dc8c\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1203425009},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:baf4eb931aab99ddd36e09d79f76ea1128c2ef536e95b78edb9af73175db2be3\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:dfb030ab67faacd3572a0cae805bd05f041ba6a589cf6fb289cb2295f364c580\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1183907051},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:169566a3a0bc4f9ca64256bb682df6ad4e2cfc5740b5338370c8202d43621680\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:5e18cee5ade3fc
0cec09a5ee469d5840c7f50ec0cda6b90150394ad661ac5380\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1179648738},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f
42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819
c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792}]}}\" for node \"crc\": Patch \"https://api-int.crc.testing:6443/api/v1/nodes/crc/status?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:04 crc kubenswrapper[4865]: E0126 16:59:04.374392 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:04 crc kubenswrapper[4865]: E0126 16:59:04.375160 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:04 crc kubenswrapper[4865]: E0126 16:59:04.375480 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:04 crc kubenswrapper[4865]: E0126 16:59:04.375853 4865 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:04 crc kubenswrapper[4865]: E0126 16:59:04.375876 4865 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 26 16:59:04 crc kubenswrapper[4865]: I0126 16:59:04.380967 4865 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="8caee532-e315-445d-b84c-730aaa98f649" Jan 26 16:59:04 crc kubenswrapper[4865]: I0126 16:59:04.381043 4865 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="8caee532-e315-445d-b84c-730aaa98f649" Jan 26 16:59:04 crc kubenswrapper[4865]: E0126 16:59:04.381595 4865 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 16:59:04 crc kubenswrapper[4865]: I0126 16:59:04.382409 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 16:59:04 crc kubenswrapper[4865]: W0126 16:59:04.422343 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-7fed800d0f4a408d55ad1838882bbb67d4727b206c606aa95a06f907ed71f0ee WatchSource:0}: Error finding container 7fed800d0f4a408d55ad1838882bbb67d4727b206c606aa95a06f907ed71f0ee: Status 404 returned error can't find the container with id 7fed800d0f4a408d55ad1838882bbb67d4727b206c606aa95a06f907ed71f0ee Jan 26 16:59:04 crc kubenswrapper[4865]: I0126 16:59:04.495123 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-k6gs8" Jan 26 16:59:04 crc kubenswrapper[4865]: I0126 16:59:04.496551 4865 status_manager.go:851] "Failed to get status for pod" podUID="2c6a6074-31d0-4bf7-a237-e9b4e2e17de7" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:04 crc kubenswrapper[4865]: I0126 16:59:04.497302 4865 status_manager.go:851] "Failed to get status for pod" podUID="109ec94e-2b65-4cc6-a53c-3f874401fd5e" pod="openshift-marketplace/redhat-marketplace-d5vtl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-d5vtl\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:04 crc kubenswrapper[4865]: I0126 16:59:04.497640 4865 status_manager.go:851] "Failed to get status for pod" podUID="ea398908-abb3-4ce8-bbaf-a44b6350314d" pod="openshift-marketplace/redhat-operators-hxg9k" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hxg9k\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:04 crc kubenswrapper[4865]: I0126 16:59:04.498090 4865 status_manager.go:851] "Failed to get status for pod" podUID="e402709d-4ea5-403d-af16-3ab178841b35" pod="openshift-marketplace/redhat-operators-pg69s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-pg69s\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:04 crc kubenswrapper[4865]: I0126 16:59:04.498539 4865 status_manager.go:851] "Failed to get status for pod" podUID="5d8bdd7f-0bfa-41ae-b684-7d96b15b7043" pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-npnhh\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:04 crc kubenswrapper[4865]: I0126 16:59:04.498921 4865 status_manager.go:851] "Failed to get status for pod" podUID="28d973df-adc6-41b0-81b9-afd3a743641f" pod="openshift-marketplace/community-operators-7x4jl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-7x4jl\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:04 crc kubenswrapper[4865]: I0126 16:59:04.499258 4865 status_manager.go:851] "Failed to get status for pod" podUID="ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf" pod="openshift-marketplace/certified-operators-fkmhm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fkmhm\": dial tcp 
38.102.83.224:6443: connect: connection refused" Jan 26 16:59:04 crc kubenswrapper[4865]: I0126 16:59:04.499717 4865 status_manager.go:851] "Failed to get status for pod" podUID="203f4245-9105-442f-a78b-dc354926516b" pod="openshift-marketplace/community-operators-6vslt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6vslt\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:04 crc kubenswrapper[4865]: I0126 16:59:04.500466 4865 status_manager.go:851] "Failed to get status for pod" podUID="216aec98-a029-4ddc-ac94-2b3a02fa9ff9" pod="openshift-image-registry/image-registry-66df7c8f76-k6gs8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/pods/image-registry-66df7c8f76-k6gs8\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:04 crc kubenswrapper[4865]: E0126 16:59:04.510881 4865 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 38.102.83.224:6443: connect: connection refused" pod="openshift-image-registry/image-registry-66df7c8f76-k6gs8" volumeName="registry-storage" Jan 26 16:59:04 crc kubenswrapper[4865]: I0126 16:59:04.713081 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"321f59e64d6f7440b2b9072ad11cd03458bd36a6b0eee06c7b299a34116b49b4"} Jan 26 16:59:04 crc kubenswrapper[4865]: I0126 16:59:04.713142 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"7fed800d0f4a408d55ad1838882bbb67d4727b206c606aa95a06f907ed71f0ee"} Jan 26 16:59:04 crc kubenswrapper[4865]: I0126 16:59:04.713505 4865 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="8caee532-e315-445d-b84c-730aaa98f649" Jan 26 16:59:04 crc kubenswrapper[4865]: I0126 16:59:04.713531 4865 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="8caee532-e315-445d-b84c-730aaa98f649" Jan 26 16:59:04 crc kubenswrapper[4865]: I0126 16:59:04.714457 4865 status_manager.go:851] "Failed to get status for pod" podUID="28d973df-adc6-41b0-81b9-afd3a743641f" pod="openshift-marketplace/community-operators-7x4jl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-7x4jl\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:04 crc kubenswrapper[4865]: I0126 16:59:04.714843 4865 status_manager.go:851] "Failed to get status for pod" podUID="ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf" pod="openshift-marketplace/certified-operators-fkmhm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fkmhm\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:04 crc kubenswrapper[4865]: I0126 16:59:04.715196 4865 status_manager.go:851] "Failed to get status for pod" podUID="203f4245-9105-442f-a78b-dc354926516b" pod="openshift-marketplace/community-operators-6vslt" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6vslt\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:04 crc kubenswrapper[4865]: I0126 16:59:04.715646 4865 status_manager.go:851] "Failed to get status for pod" podUID="216aec98-a029-4ddc-ac94-2b3a02fa9ff9" pod="openshift-image-registry/image-registry-66df7c8f76-k6gs8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/pods/image-registry-66df7c8f76-k6gs8\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:04 crc kubenswrapper[4865]: I0126 16:59:04.715917 4865 status_manager.go:851] "Failed to get status for pod" podUID="2c6a6074-31d0-4bf7-a237-e9b4e2e17de7" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:04 crc kubenswrapper[4865]: I0126 16:59:04.716213 4865 status_manager.go:851] "Failed to get status for pod" podUID="109ec94e-2b65-4cc6-a53c-3f874401fd5e" pod="openshift-marketplace/redhat-marketplace-d5vtl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-d5vtl\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:04 crc kubenswrapper[4865]: I0126 16:59:04.716494 4865 status_manager.go:851] "Failed to get status for pod" podUID="ea398908-abb3-4ce8-bbaf-a44b6350314d" pod="openshift-marketplace/redhat-operators-hxg9k" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hxg9k\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:04 crc kubenswrapper[4865]: I0126 16:59:04.716738 4865 status_manager.go:851] "Failed to get status for pod" podUID="e402709d-4ea5-403d-af16-3ab178841b35" pod="openshift-marketplace/redhat-operators-pg69s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-pg69s\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:04 crc kubenswrapper[4865]: I0126 16:59:04.717007 4865 status_manager.go:851] "Failed to get status for pod" podUID="5d8bdd7f-0bfa-41ae-b684-7d96b15b7043" pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-npnhh\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:04 crc kubenswrapper[4865]: E0126 16:59:04.717195 4865 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 16:59:05 crc kubenswrapper[4865]: E0126 16:59:05.412197 4865 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.224:6443: connect: connection refused" interval="7s" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.723109 4865 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="321f59e64d6f7440b2b9072ad11cd03458bd36a6b0eee06c7b299a34116b49b4" exitCode=0 Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.723185 
4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"321f59e64d6f7440b2b9072ad11cd03458bd36a6b0eee06c7b299a34116b49b4"} Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.723685 4865 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="8caee532-e315-445d-b84c-730aaa98f649" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.723711 4865 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="8caee532-e315-445d-b84c-730aaa98f649" Jan 26 16:59:05 crc kubenswrapper[4865]: E0126 16:59:05.724155 4865 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.724569 4865 status_manager.go:851] "Failed to get status for pod" podUID="e402709d-4ea5-403d-af16-3ab178841b35" pod="openshift-marketplace/redhat-operators-pg69s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-pg69s\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.724901 4865 status_manager.go:851] "Failed to get status for pod" podUID="5d8bdd7f-0bfa-41ae-b684-7d96b15b7043" pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-npnhh\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.725444 4865 status_manager.go:851] "Failed to get status for pod" podUID="28d973df-adc6-41b0-81b9-afd3a743641f" pod="openshift-marketplace/community-operators-7x4jl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-7x4jl\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.726030 4865 status_manager.go:851] "Failed to get status for pod" podUID="ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf" pod="openshift-marketplace/certified-operators-fkmhm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fkmhm\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.726248 4865 status_manager.go:851] "Failed to get status for pod" podUID="203f4245-9105-442f-a78b-dc354926516b" pod="openshift-marketplace/community-operators-6vslt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6vslt\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.726448 4865 status_manager.go:851] "Failed to get status for pod" podUID="216aec98-a029-4ddc-ac94-2b3a02fa9ff9" pod="openshift-image-registry/image-registry-66df7c8f76-k6gs8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/pods/image-registry-66df7c8f76-k6gs8\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.726785 4865 status_manager.go:851] "Failed to get status for pod" 
podUID="2c6a6074-31d0-4bf7-a237-e9b4e2e17de7" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.727196 4865 status_manager.go:851] "Failed to get status for pod" podUID="ea398908-abb3-4ce8-bbaf-a44b6350314d" pod="openshift-marketplace/redhat-operators-hxg9k" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hxg9k\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.727410 4865 status_manager.go:851] "Failed to get status for pod" podUID="109ec94e-2b65-4cc6-a53c-3f874401fd5e" pod="openshift-marketplace/redhat-marketplace-d5vtl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-d5vtl\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.728337 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.728409 4865 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90" exitCode=1 Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.728449 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90"} Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.729196 4865 scope.go:117] "RemoveContainer" containerID="504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.731300 4865 status_manager.go:851] "Failed to get status for pod" podUID="109ec94e-2b65-4cc6-a53c-3f874401fd5e" pod="openshift-marketplace/redhat-marketplace-d5vtl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-d5vtl\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.731966 4865 status_manager.go:851] "Failed to get status for pod" podUID="ea398908-abb3-4ce8-bbaf-a44b6350314d" pod="openshift-marketplace/redhat-operators-hxg9k" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hxg9k\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.732427 4865 status_manager.go:851] "Failed to get status for pod" podUID="e402709d-4ea5-403d-af16-3ab178841b35" pod="openshift-marketplace/redhat-operators-pg69s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-pg69s\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.737801 4865 status_manager.go:851] "Failed to get status for pod" podUID="5d8bdd7f-0bfa-41ae-b684-7d96b15b7043" pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-npnhh\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.738566 4865 status_manager.go:851] "Failed to get status for pod" podUID="28d973df-adc6-41b0-81b9-afd3a743641f" pod="openshift-marketplace/community-operators-7x4jl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-7x4jl\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.739246 4865 status_manager.go:851] "Failed to get status for pod" podUID="ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf" pod="openshift-marketplace/certified-operators-fkmhm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fkmhm\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.739806 4865 status_manager.go:851] "Failed to get status for pod" podUID="203f4245-9105-442f-a78b-dc354926516b" pod="openshift-marketplace/community-operators-6vslt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6vslt\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.740230 4865 status_manager.go:851] "Failed to get status for pod" podUID="216aec98-a029-4ddc-ac94-2b3a02fa9ff9" pod="openshift-image-registry/image-registry-66df7c8f76-k6gs8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/pods/image-registry-66df7c8f76-k6gs8\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.746958 4865 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.747339 4865 status_manager.go:851] "Failed to get status for pod" podUID="2c6a6074-31d0-4bf7-a237-e9b4e2e17de7" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.860021 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-fkmhm" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.860721 4865 status_manager.go:851] "Failed to get status for pod" podUID="203f4245-9105-442f-a78b-dc354926516b" pod="openshift-marketplace/community-operators-6vslt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6vslt\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.861226 4865 status_manager.go:851] "Failed to get status for pod" podUID="216aec98-a029-4ddc-ac94-2b3a02fa9ff9" pod="openshift-image-registry/image-registry-66df7c8f76-k6gs8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/pods/image-registry-66df7c8f76-k6gs8\": dial tcp 
38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.861885 4865 status_manager.go:851] "Failed to get status for pod" podUID="ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf" pod="openshift-marketplace/certified-operators-fkmhm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fkmhm\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.862348 4865 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.862920 4865 status_manager.go:851] "Failed to get status for pod" podUID="2c6a6074-31d0-4bf7-a237-e9b4e2e17de7" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.863606 4865 status_manager.go:851] "Failed to get status for pod" podUID="109ec94e-2b65-4cc6-a53c-3f874401fd5e" pod="openshift-marketplace/redhat-marketplace-d5vtl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-d5vtl\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.866804 4865 status_manager.go:851] "Failed to get status for pod" podUID="ea398908-abb3-4ce8-bbaf-a44b6350314d" pod="openshift-marketplace/redhat-operators-hxg9k" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hxg9k\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.870160 4865 status_manager.go:851] "Failed to get status for pod" podUID="e402709d-4ea5-403d-af16-3ab178841b35" pod="openshift-marketplace/redhat-operators-pg69s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-pg69s\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.870643 4865 status_manager.go:851] "Failed to get status for pod" podUID="5d8bdd7f-0bfa-41ae-b684-7d96b15b7043" pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-npnhh\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.870859 4865 status_manager.go:851] "Failed to get status for pod" podUID="28d973df-adc6-41b0-81b9-afd3a743641f" pod="openshift-marketplace/community-operators-7x4jl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-7x4jl\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.897463 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-6vslt" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.898670 4865 status_manager.go:851] "Failed to get status for 
pod" podUID="ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf" pod="openshift-marketplace/certified-operators-fkmhm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fkmhm\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.898821 4865 status_manager.go:851] "Failed to get status for pod" podUID="203f4245-9105-442f-a78b-dc354926516b" pod="openshift-marketplace/community-operators-6vslt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6vslt\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.898961 4865 status_manager.go:851] "Failed to get status for pod" podUID="216aec98-a029-4ddc-ac94-2b3a02fa9ff9" pod="openshift-image-registry/image-registry-66df7c8f76-k6gs8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/pods/image-registry-66df7c8f76-k6gs8\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.899136 4865 status_manager.go:851] "Failed to get status for pod" podUID="2c6a6074-31d0-4bf7-a237-e9b4e2e17de7" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.899301 4865 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.899661 4865 status_manager.go:851] "Failed to get status for pod" podUID="ea398908-abb3-4ce8-bbaf-a44b6350314d" pod="openshift-marketplace/redhat-operators-hxg9k" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hxg9k\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.900156 4865 status_manager.go:851] "Failed to get status for pod" podUID="109ec94e-2b65-4cc6-a53c-3f874401fd5e" pod="openshift-marketplace/redhat-marketplace-d5vtl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-d5vtl\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.900419 4865 status_manager.go:851] "Failed to get status for pod" podUID="e402709d-4ea5-403d-af16-3ab178841b35" pod="openshift-marketplace/redhat-operators-pg69s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-pg69s\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.900677 4865 status_manager.go:851] "Failed to get status for pod" podUID="5d8bdd7f-0bfa-41ae-b684-7d96b15b7043" pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-npnhh\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 
16:59:05.900929 4865 status_manager.go:851] "Failed to get status for pod" podUID="28d973df-adc6-41b0-81b9-afd3a743641f" pod="openshift-marketplace/community-operators-7x4jl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-7x4jl\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.909973 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-fkmhm" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.910430 4865 status_manager.go:851] "Failed to get status for pod" podUID="ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf" pod="openshift-marketplace/certified-operators-fkmhm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fkmhm\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.910699 4865 status_manager.go:851] "Failed to get status for pod" podUID="203f4245-9105-442f-a78b-dc354926516b" pod="openshift-marketplace/community-operators-6vslt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6vslt\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.910964 4865 status_manager.go:851] "Failed to get status for pod" podUID="216aec98-a029-4ddc-ac94-2b3a02fa9ff9" pod="openshift-image-registry/image-registry-66df7c8f76-k6gs8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/pods/image-registry-66df7c8f76-k6gs8\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.911204 4865 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.912559 4865 status_manager.go:851] "Failed to get status for pod" podUID="2c6a6074-31d0-4bf7-a237-e9b4e2e17de7" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.913104 4865 status_manager.go:851] "Failed to get status for pod" podUID="109ec94e-2b65-4cc6-a53c-3f874401fd5e" pod="openshift-marketplace/redhat-marketplace-d5vtl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-d5vtl\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.913571 4865 status_manager.go:851] "Failed to get status for pod" podUID="ea398908-abb3-4ce8-bbaf-a44b6350314d" pod="openshift-marketplace/redhat-operators-hxg9k" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hxg9k\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.914718 4865 status_manager.go:851] "Failed to get status for pod" podUID="e402709d-4ea5-403d-af16-3ab178841b35" 
pod="openshift-marketplace/redhat-operators-pg69s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-pg69s\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.914965 4865 status_manager.go:851] "Failed to get status for pod" podUID="5d8bdd7f-0bfa-41ae-b684-7d96b15b7043" pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-npnhh\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.915231 4865 status_manager.go:851] "Failed to get status for pod" podUID="28d973df-adc6-41b0-81b9-afd3a743641f" pod="openshift-marketplace/community-operators-7x4jl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-7x4jl\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.962818 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-6vslt" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.963419 4865 status_manager.go:851] "Failed to get status for pod" podUID="28d973df-adc6-41b0-81b9-afd3a743641f" pod="openshift-marketplace/community-operators-7x4jl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-7x4jl\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.963680 4865 status_manager.go:851] "Failed to get status for pod" podUID="ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf" pod="openshift-marketplace/certified-operators-fkmhm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fkmhm\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.963862 4865 status_manager.go:851] "Failed to get status for pod" podUID="203f4245-9105-442f-a78b-dc354926516b" pod="openshift-marketplace/community-operators-6vslt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-6vslt\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.964049 4865 status_manager.go:851] "Failed to get status for pod" podUID="216aec98-a029-4ddc-ac94-2b3a02fa9ff9" pod="openshift-image-registry/image-registry-66df7c8f76-k6gs8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/pods/image-registry-66df7c8f76-k6gs8\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.964259 4865 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.964469 4865 status_manager.go:851] "Failed to get status for pod" podUID="2c6a6074-31d0-4bf7-a237-e9b4e2e17de7" pod="openshift-kube-apiserver/installer-9-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.964659 4865 status_manager.go:851] "Failed to get status for pod" podUID="109ec94e-2b65-4cc6-a53c-3f874401fd5e" pod="openshift-marketplace/redhat-marketplace-d5vtl" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-d5vtl\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.964817 4865 status_manager.go:851] "Failed to get status for pod" podUID="ea398908-abb3-4ce8-bbaf-a44b6350314d" pod="openshift-marketplace/redhat-operators-hxg9k" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-hxg9k\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.964983 4865 status_manager.go:851] "Failed to get status for pod" podUID="e402709d-4ea5-403d-af16-3ab178841b35" pod="openshift-marketplace/redhat-operators-pg69s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-pg69s\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:05 crc kubenswrapper[4865]: I0126 16:59:05.966174 4865 status_manager.go:851] "Failed to get status for pod" podUID="5d8bdd7f-0bfa-41ae-b684-7d96b15b7043" pod="openshift-authentication/oauth-openshift-558db77b4-npnhh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-npnhh\": dial tcp 38.102.83.224:6443: connect: connection refused" Jan 26 16:59:06 crc kubenswrapper[4865]: I0126 16:59:06.746574 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"753a4feea928f00146898f3d8ff3318423b0a401e814d2b2cadc7b3bd7212789"} Jan 26 16:59:06 crc kubenswrapper[4865]: I0126 16:59:06.746877 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"42cf0a99f84058f0ff86a3647e1225db0dc06de6ded8a6b4ff1ae3deb3053afe"} Jan 26 16:59:06 crc kubenswrapper[4865]: I0126 16:59:06.746887 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"0bc8c30a53fcf1bff7df02b26e78d76ace1a810ac91114fc8375964ebaf40f69"} Jan 26 16:59:06 crc kubenswrapper[4865]: I0126 16:59:06.754376 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Jan 26 16:59:06 crc kubenswrapper[4865]: I0126 16:59:06.754525 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"dd6ebfff92e3a1ad4c40d815f8117e5db792a90eb1d5759fce681d8310fd0015"} Jan 26 16:59:07 crc kubenswrapper[4865]: I0126 16:59:07.764762 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"a25fe525f2049e1f0a9cd4d7544f79c621766bc0b90a6d9940fa612f917130bc"} Jan 26 16:59:07 crc kubenswrapper[4865]: I0126 16:59:07.765174 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"aa2049d2b1c69e32161d4fe006a72b7bf85ca05069b7bddf13d25b700780a911"} Jan 26 16:59:07 crc kubenswrapper[4865]: I0126 16:59:07.765475 4865 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="8caee532-e315-445d-b84c-730aaa98f649" Jan 26 16:59:07 crc kubenswrapper[4865]: I0126 16:59:07.765493 4865 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="8caee532-e315-445d-b84c-730aaa98f649" Jan 26 16:59:07 crc kubenswrapper[4865]: I0126 16:59:07.765721 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 16:59:08 crc kubenswrapper[4865]: I0126 16:59:08.295690 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-hxg9k" Jan 26 16:59:08 crc kubenswrapper[4865]: I0126 16:59:08.334711 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-hxg9k" Jan 26 16:59:08 crc kubenswrapper[4865]: I0126 16:59:08.677223 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-pg69s" Jan 26 16:59:08 crc kubenswrapper[4865]: I0126 16:59:08.716424 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-pg69s" Jan 26 16:59:09 crc kubenswrapper[4865]: I0126 16:59:09.383366 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 16:59:09 crc kubenswrapper[4865]: I0126 16:59:09.383445 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 16:59:09 crc kubenswrapper[4865]: I0126 16:59:09.392022 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 16:59:11 crc kubenswrapper[4865]: I0126 16:59:11.630933 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 16:59:11 crc kubenswrapper[4865]: I0126 16:59:11.946479 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 16:59:11 crc kubenswrapper[4865]: I0126 16:59:11.947049 4865 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Jan 26 16:59:11 crc kubenswrapper[4865]: I0126 16:59:11.947284 4865 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Jan 
26 16:59:12 crc kubenswrapper[4865]: I0126 16:59:12.788350 4865 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 16:59:13 crc kubenswrapper[4865]: I0126 16:59:13.801208 4865 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="8caee532-e315-445d-b84c-730aaa98f649" Jan 26 16:59:13 crc kubenswrapper[4865]: I0126 16:59:13.801811 4865 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="8caee532-e315-445d-b84c-730aaa98f649" Jan 26 16:59:13 crc kubenswrapper[4865]: I0126 16:59:13.805522 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 16:59:14 crc kubenswrapper[4865]: I0126 16:59:14.375050 4865 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="82ed02ac-ace5-48e1-8a94-68672c7cb2a1" Jan 26 16:59:14 crc kubenswrapper[4865]: I0126 16:59:14.806229 4865 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="8caee532-e315-445d-b84c-730aaa98f649" Jan 26 16:59:14 crc kubenswrapper[4865]: I0126 16:59:14.806260 4865 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="8caee532-e315-445d-b84c-730aaa98f649" Jan 26 16:59:14 crc kubenswrapper[4865]: I0126 16:59:14.810609 4865 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="82ed02ac-ace5-48e1-8a94-68672c7cb2a1" Jan 26 16:59:21 crc kubenswrapper[4865]: I0126 16:59:21.946743 4865 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Jan 26 16:59:21 crc kubenswrapper[4865]: I0126 16:59:21.947600 4865 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Jan 26 16:59:22 crc kubenswrapper[4865]: I0126 16:59:22.838977 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Jan 26 16:59:23 crc kubenswrapper[4865]: I0126 16:59:23.302656 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Jan 26 16:59:23 crc kubenswrapper[4865]: I0126 16:59:23.788162 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Jan 26 16:59:24 crc kubenswrapper[4865]: I0126 16:59:24.100265 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Jan 26 16:59:24 crc kubenswrapper[4865]: I0126 16:59:24.260437 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Jan 26 16:59:24 crc 
kubenswrapper[4865]: I0126 16:59:24.378662 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Jan 26 16:59:24 crc kubenswrapper[4865]: I0126 16:59:24.512101 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Jan 26 16:59:24 crc kubenswrapper[4865]: I0126 16:59:24.614768 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Jan 26 16:59:24 crc kubenswrapper[4865]: I0126 16:59:24.632242 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Jan 26 16:59:24 crc kubenswrapper[4865]: I0126 16:59:24.643764 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Jan 26 16:59:24 crc kubenswrapper[4865]: I0126 16:59:24.661252 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Jan 26 16:59:24 crc kubenswrapper[4865]: I0126 16:59:24.744837 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 26 16:59:24 crc kubenswrapper[4865]: I0126 16:59:24.922274 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Jan 26 16:59:24 crc kubenswrapper[4865]: I0126 16:59:24.942853 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Jan 26 16:59:24 crc kubenswrapper[4865]: I0126 16:59:24.981560 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Jan 26 16:59:25 crc kubenswrapper[4865]: I0126 16:59:25.254122 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 26 16:59:25 crc kubenswrapper[4865]: I0126 16:59:25.362646 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Jan 26 16:59:25 crc kubenswrapper[4865]: I0126 16:59:25.450438 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Jan 26 16:59:25 crc kubenswrapper[4865]: I0126 16:59:25.485596 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Jan 26 16:59:25 crc kubenswrapper[4865]: I0126 16:59:25.552074 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Jan 26 16:59:25 crc kubenswrapper[4865]: I0126 16:59:25.568355 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 26 16:59:25 crc kubenswrapper[4865]: I0126 16:59:25.836670 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Jan 26 16:59:25 crc kubenswrapper[4865]: I0126 16:59:25.968045 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Jan 26 16:59:25 crc kubenswrapper[4865]: I0126 16:59:25.980528 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jan 26 16:59:26 crc kubenswrapper[4865]: I0126 16:59:26.011245 4865 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Jan 26 16:59:26 crc kubenswrapper[4865]: I0126 16:59:26.324725 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Jan 26 16:59:26 crc kubenswrapper[4865]: I0126 16:59:26.370950 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Jan 26 16:59:26 crc kubenswrapper[4865]: I0126 16:59:26.423418 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Jan 26 16:59:26 crc kubenswrapper[4865]: I0126 16:59:26.470901 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 26 16:59:26 crc kubenswrapper[4865]: I0126 16:59:26.514090 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Jan 26 16:59:26 crc kubenswrapper[4865]: I0126 16:59:26.517090 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Jan 26 16:59:26 crc kubenswrapper[4865]: I0126 16:59:26.520977 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Jan 26 16:59:26 crc kubenswrapper[4865]: I0126 16:59:26.521631 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Jan 26 16:59:26 crc kubenswrapper[4865]: I0126 16:59:26.583135 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Jan 26 16:59:26 crc kubenswrapper[4865]: I0126 16:59:26.587018 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Jan 26 16:59:26 crc kubenswrapper[4865]: I0126 16:59:26.622473 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Jan 26 16:59:26 crc kubenswrapper[4865]: I0126 16:59:26.664122 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Jan 26 16:59:26 crc kubenswrapper[4865]: I0126 16:59:26.673401 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 26 16:59:26 crc kubenswrapper[4865]: I0126 16:59:26.807426 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Jan 26 16:59:26 crc kubenswrapper[4865]: I0126 16:59:26.829972 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Jan 26 16:59:26 crc kubenswrapper[4865]: I0126 16:59:26.844620 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Jan 26 16:59:26 crc kubenswrapper[4865]: I0126 16:59:26.875062 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Jan 26 16:59:26 crc kubenswrapper[4865]: I0126 16:59:26.895627 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" 
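The long runs of reflector.go:368 "Caches populated" entries are the kubelet's client-go reflectors finishing their initial List+Watch for each ConfigMap and Secret that pods on this node reference; from then on those objects are served from a local informer cache rather than per-sync API reads. A standalone client-go sketch of the same mechanism (not the kubelet's exact wiring, which uses a single-object informer per referenced Secret/ConfigMap):

    package main

    import (
        "fmt"
        "time"

        "k8s.io/client-go/informers"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/tools/clientcmd"
    )

    func main() {
        // Illustrative client setup from the default kubeconfig location.
        cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
        if err != nil {
            panic(err)
        }
        client := kubernetes.NewForConfigOrDie(cfg)

        stop := make(chan struct{})
        defer close(stop)

        // reflector.go logs "Caches populated" once an informer's initial
        // List+Watch has filled its local store.
        factory := informers.NewSharedInformerFactory(client, 10*time.Minute)
        secrets := factory.Core().V1().Secrets().Lister()
        factory.Start(stop)
        factory.WaitForCacheSync(stop)

        // Subsequent reads hit the cache, not the API server.
        s, err := secrets.Secrets("openshift-marketplace").Get("marketplace-operator-metrics")
        if err != nil {
            fmt.Println("lookup error:", err)
            return
        }
        fmt.Println("served from cache:", s.GetName())
    }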
Jan 26 16:59:26 crc kubenswrapper[4865]: I0126 16:59:26.902930 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Jan 26 16:59:26 crc kubenswrapper[4865]: I0126 16:59:26.980063 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Jan 26 16:59:26 crc kubenswrapper[4865]: I0126 16:59:26.996186 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Jan 26 16:59:27 crc kubenswrapper[4865]: I0126 16:59:27.005206 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Jan 26 16:59:27 crc kubenswrapper[4865]: I0126 16:59:27.058746 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Jan 26 16:59:27 crc kubenswrapper[4865]: I0126 16:59:27.111954 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 26 16:59:27 crc kubenswrapper[4865]: I0126 16:59:27.134852 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Jan 26 16:59:27 crc kubenswrapper[4865]: I0126 16:59:27.221502 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Jan 26 16:59:27 crc kubenswrapper[4865]: I0126 16:59:27.242692 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Jan 26 16:59:27 crc kubenswrapper[4865]: I0126 16:59:27.265824 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Jan 26 16:59:27 crc kubenswrapper[4865]: I0126 16:59:27.278537 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Jan 26 16:59:27 crc kubenswrapper[4865]: I0126 16:59:27.294928 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Jan 26 16:59:27 crc kubenswrapper[4865]: I0126 16:59:27.327879 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Jan 26 16:59:27 crc kubenswrapper[4865]: I0126 16:59:27.348943 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Jan 26 16:59:27 crc kubenswrapper[4865]: I0126 16:59:27.358531 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Jan 26 16:59:27 crc kubenswrapper[4865]: I0126 16:59:27.428061 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Jan 26 16:59:27 crc kubenswrapper[4865]: I0126 16:59:27.487228 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Jan 26 16:59:27 crc kubenswrapper[4865]: I0126 16:59:27.688637 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Jan 26 16:59:27 crc kubenswrapper[4865]: I0126 16:59:27.731128 4865 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Jan 26 16:59:27 crc kubenswrapper[4865]: I0126 16:59:27.753185 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 26 16:59:27 crc kubenswrapper[4865]: I0126 16:59:27.767919 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Jan 26 16:59:27 crc kubenswrapper[4865]: I0126 16:59:27.801662 4865 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Jan 26 16:59:27 crc kubenswrapper[4865]: I0126 16:59:27.805484 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Jan 26 16:59:27 crc kubenswrapper[4865]: I0126 16:59:27.941446 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Jan 26 16:59:27 crc kubenswrapper[4865]: I0126 16:59:27.951859 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Jan 26 16:59:27 crc kubenswrapper[4865]: I0126 16:59:27.961326 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Jan 26 16:59:27 crc kubenswrapper[4865]: I0126 16:59:27.981434 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Jan 26 16:59:28 crc kubenswrapper[4865]: I0126 16:59:28.046291 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Jan 26 16:59:28 crc kubenswrapper[4865]: I0126 16:59:28.054317 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Jan 26 16:59:28 crc kubenswrapper[4865]: I0126 16:59:28.062452 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Jan 26 16:59:28 crc kubenswrapper[4865]: I0126 16:59:28.148431 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Jan 26 16:59:28 crc kubenswrapper[4865]: I0126 16:59:28.153025 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Jan 26 16:59:28 crc kubenswrapper[4865]: I0126 16:59:28.181973 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Jan 26 16:59:28 crc kubenswrapper[4865]: I0126 16:59:28.261490 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Jan 26 16:59:28 crc kubenswrapper[4865]: I0126 16:59:28.271015 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Jan 26 16:59:28 crc kubenswrapper[4865]: I0126 16:59:28.324814 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Jan 26 16:59:28 crc kubenswrapper[4865]: I0126 16:59:28.332838 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Jan 26 16:59:28 crc kubenswrapper[4865]: I0126 16:59:28.354605 4865 reflector.go:368] 
Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Jan 26 16:59:28 crc kubenswrapper[4865]: I0126 16:59:28.702202 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Jan 26 16:59:28 crc kubenswrapper[4865]: I0126 16:59:28.718846 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Jan 26 16:59:28 crc kubenswrapper[4865]: I0126 16:59:28.809490 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Jan 26 16:59:28 crc kubenswrapper[4865]: I0126 16:59:28.862473 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Jan 26 16:59:28 crc kubenswrapper[4865]: I0126 16:59:28.865038 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Jan 26 16:59:28 crc kubenswrapper[4865]: I0126 16:59:28.883401 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Jan 26 16:59:29 crc kubenswrapper[4865]: I0126 16:59:29.039862 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Jan 26 16:59:29 crc kubenswrapper[4865]: I0126 16:59:29.060785 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Jan 26 16:59:29 crc kubenswrapper[4865]: I0126 16:59:29.226400 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Jan 26 16:59:29 crc kubenswrapper[4865]: I0126 16:59:29.279842 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Jan 26 16:59:29 crc kubenswrapper[4865]: I0126 16:59:29.428965 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Jan 26 16:59:29 crc kubenswrapper[4865]: I0126 16:59:29.448457 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 26 16:59:29 crc kubenswrapper[4865]: I0126 16:59:29.474767 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Jan 26 16:59:29 crc kubenswrapper[4865]: I0126 16:59:29.517857 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Jan 26 16:59:29 crc kubenswrapper[4865]: I0126 16:59:29.635876 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Jan 26 16:59:29 crc kubenswrapper[4865]: I0126 16:59:29.728899 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Jan 26 16:59:29 crc kubenswrapper[4865]: I0126 16:59:29.880459 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Jan 26 16:59:29 crc kubenswrapper[4865]: I0126 16:59:29.982229 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Jan 26 16:59:30 crc kubenswrapper[4865]: I0126 16:59:30.130970 4865 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Jan 26 16:59:30 crc kubenswrapper[4865]: I0126 16:59:30.138852 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Jan 26 16:59:30 crc kubenswrapper[4865]: I0126 16:59:30.139822 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Jan 26 16:59:30 crc kubenswrapper[4865]: I0126 16:59:30.150431 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Jan 26 16:59:30 crc kubenswrapper[4865]: I0126 16:59:30.152202 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Jan 26 16:59:30 crc kubenswrapper[4865]: I0126 16:59:30.321035 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Jan 26 16:59:30 crc kubenswrapper[4865]: I0126 16:59:30.334106 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Jan 26 16:59:30 crc kubenswrapper[4865]: I0126 16:59:30.365692 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Jan 26 16:59:30 crc kubenswrapper[4865]: I0126 16:59:30.452102 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Jan 26 16:59:30 crc kubenswrapper[4865]: I0126 16:59:30.470120 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Jan 26 16:59:30 crc kubenswrapper[4865]: I0126 16:59:30.501432 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Jan 26 16:59:30 crc kubenswrapper[4865]: I0126 16:59:30.528622 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Jan 26 16:59:30 crc kubenswrapper[4865]: I0126 16:59:30.626871 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Jan 26 16:59:30 crc kubenswrapper[4865]: I0126 16:59:30.673205 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Jan 26 16:59:30 crc kubenswrapper[4865]: I0126 16:59:30.675633 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Jan 26 16:59:30 crc kubenswrapper[4865]: I0126 16:59:30.683987 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 26 16:59:30 crc kubenswrapper[4865]: I0126 16:59:30.712288 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Jan 26 16:59:30 crc kubenswrapper[4865]: I0126 16:59:30.730667 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Jan 26 16:59:30 crc kubenswrapper[4865]: I0126 16:59:30.879946 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Jan 26 16:59:30 crc kubenswrapper[4865]: I0126 16:59:30.913042 4865 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Jan 26 16:59:30 crc kubenswrapper[4865]: I0126 16:59:30.922555 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Jan 26 16:59:30 crc kubenswrapper[4865]: I0126 16:59:30.922780 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Jan 26 16:59:31 crc kubenswrapper[4865]: I0126 16:59:31.006322 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Jan 26 16:59:31 crc kubenswrapper[4865]: I0126 16:59:31.039108 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Jan 26 16:59:31 crc kubenswrapper[4865]: I0126 16:59:31.116559 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Jan 26 16:59:31 crc kubenswrapper[4865]: I0126 16:59:31.142097 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Jan 26 16:59:31 crc kubenswrapper[4865]: I0126 16:59:31.163414 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 26 16:59:31 crc kubenswrapper[4865]: I0126 16:59:31.224264 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Jan 26 16:59:31 crc kubenswrapper[4865]: I0126 16:59:31.251254 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Jan 26 16:59:31 crc kubenswrapper[4865]: I0126 16:59:31.309902 4865 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Jan 26 16:59:31 crc kubenswrapper[4865]: I0126 16:59:31.312228 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-hxg9k" podStartSLOduration=46.012312033 podStartE2EDuration="2m14.312206768s" podCreationTimestamp="2026-01-26 16:57:17 +0000 UTC" firstStartedPulling="2026-01-26 16:57:20.916428345 +0000 UTC m=+168.500313932" lastFinishedPulling="2026-01-26 16:58:49.21632308 +0000 UTC m=+256.800208667" observedRunningTime="2026-01-26 16:59:12.940121517 +0000 UTC m=+280.524007124" watchObservedRunningTime="2026-01-26 16:59:31.312206768 +0000 UTC m=+298.896092355" Jan 26 16:59:31 crc kubenswrapper[4865]: I0126 16:59:31.312643 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-pg69s" podStartSLOduration=39.736858336 podStartE2EDuration="2m13.3126379s" podCreationTimestamp="2026-01-26 16:57:18 +0000 UTC" firstStartedPulling="2026-01-26 16:57:20.94344292 +0000 UTC m=+168.527328507" lastFinishedPulling="2026-01-26 16:58:54.519222484 +0000 UTC m=+262.103108071" observedRunningTime="2026-01-26 16:59:12.957541871 +0000 UTC m=+280.541427468" watchObservedRunningTime="2026-01-26 16:59:31.3126379 +0000 UTC m=+298.896523487" Jan 26 16:59:31 crc kubenswrapper[4865]: I0126 16:59:31.314709 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-6vslt" podStartSLOduration=41.408174512 podStartE2EDuration="2m16.31470269s" podCreationTimestamp="2026-01-26 16:57:15 +0000 UTC" firstStartedPulling="2026-01-26 16:57:19.718200557 +0000 UTC m=+167.302086134" lastFinishedPulling="2026-01-26 
Jan 26 16:59:31 crc kubenswrapper[4865]: I0126 16:59:31.314709 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-6vslt" podStartSLOduration=41.408174512 podStartE2EDuration="2m16.31470269s" podCreationTimestamp="2026-01-26 16:57:15 +0000 UTC" firstStartedPulling="2026-01-26 16:57:19.718200557 +0000 UTC m=+167.302086134" lastFinishedPulling="2026-01-26 16:58:54.624728725 +0000 UTC m=+262.208614312" observedRunningTime="2026-01-26 16:59:12.829823457 +0000 UTC m=+280.413709044" watchObservedRunningTime="2026-01-26 16:59:31.31470269 +0000 UTC m=+298.898588277"
Jan 26 16:59:31 crc kubenswrapper[4865]: I0126 16:59:31.315104 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-fkmhm" podStartSLOduration=40.200223073 podStartE2EDuration="2m16.315096921s" podCreationTimestamp="2026-01-26 16:57:15 +0000 UTC" firstStartedPulling="2026-01-26 16:57:18.512467843 +0000 UTC m=+166.096353420" lastFinishedPulling="2026-01-26 16:58:54.627341681 +0000 UTC m=+262.211227268" observedRunningTime="2026-01-26 16:59:12.814168694 +0000 UTC m=+280.398054291" watchObservedRunningTime="2026-01-26 16:59:31.315096921 +0000 UTC m=+298.898982508"
Jan 26 16:59:31 crc kubenswrapper[4865]: I0126 16:59:31.315740 4865 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-authentication/oauth-openshift-558db77b4-npnhh","openshift-marketplace/redhat-marketplace-d5vtl"]
Jan 26 16:59:31 crc kubenswrapper[4865]: I0126 16:59:31.315808 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Jan 26 16:59:31 crc kubenswrapper[4865]: I0126 16:59:31.321723 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Jan 26 16:59:31 crc kubenswrapper[4865]: I0126 16:59:31.341726 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=19.341699941 podStartE2EDuration="19.341699941s" podCreationTimestamp="2026-01-26 16:59:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:59:31.339897079 +0000 UTC m=+298.923782666" watchObservedRunningTime="2026-01-26 16:59:31.341699941 +0000 UTC m=+298.925585518"
Jan 26 16:59:31 crc kubenswrapper[4865]: I0126 16:59:31.361420 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default"
Jan 26 16:59:31 crc kubenswrapper[4865]: I0126 16:59:31.364535 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources"
Jan 26 16:59:31 crc kubenswrapper[4865]: I0126 16:59:31.424936 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist"
Jan 26 16:59:31 crc kubenswrapper[4865]: I0126 16:59:31.439176 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Jan 26 16:59:31 crc kubenswrapper[4865]: I0126 16:59:31.441366 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy"
Jan 26 16:59:31 crc kubenswrapper[4865]: I0126 16:59:31.563155 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw"
Jan 26 16:59:31 crc kubenswrapper[4865]: I0126 16:59:31.601707 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt"
Jan 26 16:59:31 crc kubenswrapper[4865]: I0126 16:59:31.632712 4865 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 26 16:59:31 crc kubenswrapper[4865]: I0126 16:59:31.634434 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Jan 26 16:59:31 crc kubenswrapper[4865]: I0126 16:59:31.791200 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Jan 26 16:59:31 crc kubenswrapper[4865]: I0126 16:59:31.844635 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Jan 26 16:59:31 crc kubenswrapper[4865]: I0126 16:59:31.884268 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Jan 26 16:59:31 crc kubenswrapper[4865]: I0126 16:59:31.946162 4865 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Jan 26 16:59:31 crc kubenswrapper[4865]: I0126 16:59:31.946253 4865 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Jan 26 16:59:31 crc kubenswrapper[4865]: I0126 16:59:31.946344 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 16:59:31 crc kubenswrapper[4865]: I0126 16:59:31.947329 4865 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="kube-controller-manager" containerStatusID={"Type":"cri-o","ID":"dd6ebfff92e3a1ad4c40d815f8117e5db792a90eb1d5759fce681d8310fd0015"} pod="openshift-kube-controller-manager/kube-controller-manager-crc" containerMessage="Container kube-controller-manager failed startup probe, will be restarted" Jan 26 16:59:31 crc kubenswrapper[4865]: I0126 16:59:31.947475 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" containerID="cri-o://dd6ebfff92e3a1ad4c40d815f8117e5db792a90eb1d5759fce681d8310fd0015" gracePeriod=30 Jan 26 16:59:31 crc kubenswrapper[4865]: I0126 16:59:31.993848 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Jan 26 16:59:32 crc kubenswrapper[4865]: I0126 16:59:32.083480 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Jan 26 16:59:32 crc kubenswrapper[4865]: I0126 16:59:32.189552 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Jan 26 16:59:32 crc kubenswrapper[4865]: I0126 16:59:32.230718 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Jan 26 16:59:32 crc kubenswrapper[4865]: I0126 16:59:32.312316 4865 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Jan 26 16:59:32 crc kubenswrapper[4865]: I0126 16:59:32.345851 4865 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Jan 26 16:59:32 crc kubenswrapper[4865]: I0126 16:59:32.367462 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="109ec94e-2b65-4cc6-a53c-3f874401fd5e" path="/var/lib/kubelet/pods/109ec94e-2b65-4cc6-a53c-3f874401fd5e/volumes" Jan 26 16:59:32 crc kubenswrapper[4865]: I0126 16:59:32.368245 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d8bdd7f-0bfa-41ae-b684-7d96b15b7043" path="/var/lib/kubelet/pods/5d8bdd7f-0bfa-41ae-b684-7d96b15b7043/volumes" Jan 26 16:59:32 crc kubenswrapper[4865]: I0126 16:59:32.581014 4865 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Jan 26 16:59:32 crc kubenswrapper[4865]: I0126 16:59:32.723208 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Jan 26 16:59:32 crc kubenswrapper[4865]: I0126 16:59:32.837642 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Jan 26 16:59:32 crc kubenswrapper[4865]: I0126 16:59:32.856541 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Jan 26 16:59:32 crc kubenswrapper[4865]: I0126 16:59:32.935255 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Jan 26 16:59:32 crc kubenswrapper[4865]: I0126 16:59:32.970087 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Jan 26 16:59:32 crc kubenswrapper[4865]: I0126 16:59:32.993549 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Jan 26 16:59:33 crc kubenswrapper[4865]: I0126 16:59:33.016651 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Jan 26 16:59:33 crc kubenswrapper[4865]: I0126 16:59:33.133400 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Jan 26 16:59:33 crc kubenswrapper[4865]: I0126 16:59:33.208066 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Jan 26 16:59:33 crc kubenswrapper[4865]: I0126 16:59:33.226189 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Jan 26 16:59:33 crc kubenswrapper[4865]: I0126 16:59:33.284669 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Jan 26 16:59:33 crc kubenswrapper[4865]: I0126 16:59:33.289945 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Jan 26 16:59:33 crc kubenswrapper[4865]: I0126 16:59:33.305127 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Jan 26 16:59:33 crc kubenswrapper[4865]: I0126 16:59:33.305793 4865 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Jan 26 16:59:33 crc kubenswrapper[4865]: I0126 16:59:33.421570 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Jan 26 16:59:33 crc kubenswrapper[4865]: I0126 16:59:33.423422 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Jan 26 16:59:33 crc kubenswrapper[4865]: I0126 16:59:33.536399 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Jan 26 16:59:33 crc kubenswrapper[4865]: I0126 16:59:33.858449 4865 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Jan 26 16:59:33 crc kubenswrapper[4865]: I0126 16:59:33.921679 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Jan 26 16:59:34 crc kubenswrapper[4865]: I0126 16:59:34.030258 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Jan 26 16:59:34 crc kubenswrapper[4865]: I0126 16:59:34.061763 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Jan 26 16:59:34 crc kubenswrapper[4865]: I0126 16:59:34.078466 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Jan 26 16:59:34 crc kubenswrapper[4865]: I0126 16:59:34.110450 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Jan 26 16:59:34 crc kubenswrapper[4865]: I0126 16:59:34.129890 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Jan 26 16:59:34 crc kubenswrapper[4865]: I0126 16:59:34.147909 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Jan 26 16:59:34 crc kubenswrapper[4865]: I0126 16:59:34.180724 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 26 16:59:34 crc kubenswrapper[4865]: I0126 16:59:34.232400 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Jan 26 16:59:34 crc kubenswrapper[4865]: I0126 16:59:34.271282 4865 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 26 16:59:34 crc kubenswrapper[4865]: I0126 16:59:34.271646 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://a0131cc4c4c0f4f31a777307e57e84744a9bc427c1965c4a18d074e4bca49b24" gracePeriod=5 Jan 26 16:59:34 crc kubenswrapper[4865]: I0126 16:59:34.304175 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Jan 26 16:59:34 crc kubenswrapper[4865]: I0126 16:59:34.374604 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Jan 26 16:59:34 crc kubenswrapper[4865]: I0126 16:59:34.457750 4865 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-cluster-samples-operator"/"samples-operator-tls" Jan 26 16:59:34 crc kubenswrapper[4865]: I0126 16:59:34.462789 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Jan 26 16:59:34 crc kubenswrapper[4865]: I0126 16:59:34.585236 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Jan 26 16:59:34 crc kubenswrapper[4865]: I0126 16:59:34.665784 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Jan 26 16:59:34 crc kubenswrapper[4865]: I0126 16:59:34.730374 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Jan 26 16:59:34 crc kubenswrapper[4865]: I0126 16:59:34.762662 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Jan 26 16:59:34 crc kubenswrapper[4865]: I0126 16:59:34.807256 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Jan 26 16:59:34 crc kubenswrapper[4865]: I0126 16:59:34.821346 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Jan 26 16:59:34 crc kubenswrapper[4865]: I0126 16:59:34.834378 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 26 16:59:34 crc kubenswrapper[4865]: I0126 16:59:34.879454 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Jan 26 16:59:34 crc kubenswrapper[4865]: I0126 16:59:34.906754 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Jan 26 16:59:34 crc kubenswrapper[4865]: I0126 16:59:34.990669 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Jan 26 16:59:35 crc kubenswrapper[4865]: I0126 16:59:35.018312 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Jan 26 16:59:35 crc kubenswrapper[4865]: I0126 16:59:35.177974 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Jan 26 16:59:35 crc kubenswrapper[4865]: I0126 16:59:35.218276 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Jan 26 16:59:35 crc kubenswrapper[4865]: I0126 16:59:35.228243 4865 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Jan 26 16:59:35 crc kubenswrapper[4865]: I0126 16:59:35.262080 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Jan 26 16:59:35 crc kubenswrapper[4865]: I0126 16:59:35.376153 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Jan 26 16:59:35 crc kubenswrapper[4865]: I0126 16:59:35.471381 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Jan 26 16:59:35 crc kubenswrapper[4865]: I0126 16:59:35.485314 4865 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-dns"/"dns-dockercfg-jwfmh" Jan 26 16:59:35 crc kubenswrapper[4865]: I0126 16:59:35.736245 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Jan 26 16:59:35 crc kubenswrapper[4865]: I0126 16:59:35.823124 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Jan 26 16:59:35 crc kubenswrapper[4865]: I0126 16:59:35.959412 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Jan 26 16:59:35 crc kubenswrapper[4865]: I0126 16:59:35.993916 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Jan 26 16:59:36 crc kubenswrapper[4865]: I0126 16:59:36.026531 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Jan 26 16:59:36 crc kubenswrapper[4865]: I0126 16:59:36.145239 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Jan 26 16:59:36 crc kubenswrapper[4865]: I0126 16:59:36.156854 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 26 16:59:36 crc kubenswrapper[4865]: I0126 16:59:36.225499 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Jan 26 16:59:36 crc kubenswrapper[4865]: I0126 16:59:36.565361 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Jan 26 16:59:36 crc kubenswrapper[4865]: I0126 16:59:36.685202 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Jan 26 16:59:36 crc kubenswrapper[4865]: I0126 16:59:36.775574 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Jan 26 16:59:36 crc kubenswrapper[4865]: I0126 16:59:36.898791 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Jan 26 16:59:36 crc kubenswrapper[4865]: I0126 16:59:36.988839 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Jan 26 16:59:36 crc kubenswrapper[4865]: I0126 16:59:36.989700 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-68755f559b-nccsz"] Jan 26 16:59:36 crc kubenswrapper[4865]: E0126 16:59:36.991040 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 26 16:59:36 crc kubenswrapper[4865]: I0126 16:59:36.991072 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 26 16:59:36 crc kubenswrapper[4865]: E0126 16:59:36.991088 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c6a6074-31d0-4bf7-a237-e9b4e2e17de7" containerName="installer" Jan 26 16:59:36 crc kubenswrapper[4865]: I0126 16:59:36.991098 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c6a6074-31d0-4bf7-a237-e9b4e2e17de7" containerName="installer" Jan 26 16:59:36 crc 
kubenswrapper[4865]: E0126 16:59:36.991109 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="109ec94e-2b65-4cc6-a53c-3f874401fd5e" containerName="extract-utilities" Jan 26 16:59:36 crc kubenswrapper[4865]: I0126 16:59:36.991117 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="109ec94e-2b65-4cc6-a53c-3f874401fd5e" containerName="extract-utilities" Jan 26 16:59:36 crc kubenswrapper[4865]: E0126 16:59:36.991129 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d8bdd7f-0bfa-41ae-b684-7d96b15b7043" containerName="oauth-openshift" Jan 26 16:59:36 crc kubenswrapper[4865]: I0126 16:59:36.991136 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d8bdd7f-0bfa-41ae-b684-7d96b15b7043" containerName="oauth-openshift" Jan 26 16:59:36 crc kubenswrapper[4865]: E0126 16:59:36.991145 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="109ec94e-2b65-4cc6-a53c-3f874401fd5e" containerName="extract-content" Jan 26 16:59:36 crc kubenswrapper[4865]: I0126 16:59:36.991152 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="109ec94e-2b65-4cc6-a53c-3f874401fd5e" containerName="extract-content" Jan 26 16:59:36 crc kubenswrapper[4865]: E0126 16:59:36.991167 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="109ec94e-2b65-4cc6-a53c-3f874401fd5e" containerName="registry-server" Jan 26 16:59:36 crc kubenswrapper[4865]: I0126 16:59:36.991174 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="109ec94e-2b65-4cc6-a53c-3f874401fd5e" containerName="registry-server" Jan 26 16:59:36 crc kubenswrapper[4865]: I0126 16:59:36.991296 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 26 16:59:36 crc kubenswrapper[4865]: I0126 16:59:36.991309 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d8bdd7f-0bfa-41ae-b684-7d96b15b7043" containerName="oauth-openshift" Jan 26 16:59:36 crc kubenswrapper[4865]: I0126 16:59:36.991325 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="109ec94e-2b65-4cc6-a53c-3f874401fd5e" containerName="registry-server" Jan 26 16:59:36 crc kubenswrapper[4865]: I0126 16:59:36.991334 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c6a6074-31d0-4bf7-a237-e9b4e2e17de7" containerName="installer" Jan 26 16:59:36 crc kubenswrapper[4865]: I0126 16:59:36.993266 4865 util.go:30] "No sandbox for pod can be found. 
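The cpu_manager.go:410 / state_mem.go:107 / memory_manager.go:354 burst above is admission-time housekeeping: before starting oauth-openshift-68755f559b-nccsz, the kubelet sweeps CPU and memory manager state belonging to containers of pods that no longer exist (the old startup-monitor, installer, registry-server and oauth-openshift containers seen earlier in this log). A schematic sketch of such a sweep, keyed the same way the log identifies entries, though not the kubelet's actual implementation:

    package main

    import "fmt"

    // Resource-manager state is addressed per podUID + containerName,
    // matching the fields printed in the RemoveStaleState entries.
    type key struct{ podUID, container string }

    // removeStaleState deletes assignments whose pod is no longer active.
    func removeStaleState(assignments map[key]string, active map[string]bool) {
        for k := range assignments {
            if !active[k.podUID] {
                fmt.Printf("removing container %q of deleted pod %q\n", k.container, k.podUID)
                delete(assignments, k) // deleting during range is safe in Go
            }
        }
    }

    func main() {
        state := map[key]string{
            {podUID: "f85e55b1a89d02b0cb034b1ea31ed45a", container: "startup-monitor"}:   "cpuset 0-3",
            {podUID: "109ec94e-2b65-4cc6-a53c-3f874401fd5e", container: "registry-server"}: "cpuset 0-3",
        }
        removeStaleState(state, map[string]bool{}) // neither pod is active anymore
        fmt.Println("entries left:", len(state))
    }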
Jan 26 16:59:36 crc kubenswrapper[4865]: I0126 16:59:36.993266 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-68755f559b-nccsz"
Jan 26 16:59:36 crc kubenswrapper[4865]: I0126 16:59:36.995649 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls"
Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.001251 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session"
Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.001341 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig"
Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.001536 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca"
Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.001617 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs"
Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.001697 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error"
Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.001250 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert"
Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.001820 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit"
Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.001827 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data"
Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.002513 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc"
Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.002731 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection"
Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.007688 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt"
Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.008019 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt"
Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.008810 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-68755f559b-nccsz"]
Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.017271 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login"
Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.024108 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle"
Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.026131 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template"
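From here the volume manager works through the new pod's volumes, and each volume appears three times: VerifyControllerAttachedVolume (reconciler_common.go:245), MountVolume started (reconciler_common.go:218), and MountVolume.SetUp succeeded (operation_generator.go:637). The UniqueName in every entry is plugin name plus pod UID plus volume name, which a small helper can reproduce (an illustrative formatter, not kubelet code):

    package main

    import "fmt"

    // uniqueVolumeName rebuilds the UniqueName shape visible in the
    // entries below: kubernetes.io/<plugin>/<podUID>-<volumeName>.
    func uniqueVolumeName(plugin, podUID, volume string) string {
        return fmt.Sprintf("kubernetes.io/%s/%s-%s", plugin, podUID, volume)
    }

    func main() {
        podUID := "590520ba-45e3-4cb1-9079-fd4aa7bcb9b6" // oauth-openshift-68755f559b-nccsz
        for _, v := range []struct{ plugin, name string }{
            {"configmap", "v4-0-config-system-cliconfig"},
            {"host-path", "audit-dir"},
            {"secret", "v4-0-config-system-serving-cert"},
            {"projected", "kube-api-access-bqd4s"},
        } {
            fmt.Println(uniqueVolumeName(v.plugin, podUID, v.name))
        }
    }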
\"kubernetes.io/configmap/590520ba-45e3-4cb1-9079-fd4aa7bcb9b6-v4-0-config-system-cliconfig\") pod \"oauth-openshift-68755f559b-nccsz\" (UID: \"590520ba-45e3-4cb1-9079-fd4aa7bcb9b6\") " pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.093254 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/590520ba-45e3-4cb1-9079-fd4aa7bcb9b6-audit-dir\") pod \"oauth-openshift-68755f559b-nccsz\" (UID: \"590520ba-45e3-4cb1-9079-fd4aa7bcb9b6\") " pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.093370 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/590520ba-45e3-4cb1-9079-fd4aa7bcb9b6-v4-0-config-system-serving-cert\") pod \"oauth-openshift-68755f559b-nccsz\" (UID: \"590520ba-45e3-4cb1-9079-fd4aa7bcb9b6\") " pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.093461 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/590520ba-45e3-4cb1-9079-fd4aa7bcb9b6-v4-0-config-system-router-certs\") pod \"oauth-openshift-68755f559b-nccsz\" (UID: \"590520ba-45e3-4cb1-9079-fd4aa7bcb9b6\") " pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.093499 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/590520ba-45e3-4cb1-9079-fd4aa7bcb9b6-v4-0-config-user-template-error\") pod \"oauth-openshift-68755f559b-nccsz\" (UID: \"590520ba-45e3-4cb1-9079-fd4aa7bcb9b6\") " pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.093588 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bqd4s\" (UniqueName: \"kubernetes.io/projected/590520ba-45e3-4cb1-9079-fd4aa7bcb9b6-kube-api-access-bqd4s\") pod \"oauth-openshift-68755f559b-nccsz\" (UID: \"590520ba-45e3-4cb1-9079-fd4aa7bcb9b6\") " pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.093612 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/590520ba-45e3-4cb1-9079-fd4aa7bcb9b6-v4-0-config-user-template-login\") pod \"oauth-openshift-68755f559b-nccsz\" (UID: \"590520ba-45e3-4cb1-9079-fd4aa7bcb9b6\") " pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.093646 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/590520ba-45e3-4cb1-9079-fd4aa7bcb9b6-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-68755f559b-nccsz\" (UID: \"590520ba-45e3-4cb1-9079-fd4aa7bcb9b6\") " pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.093923 4865 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/590520ba-45e3-4cb1-9079-fd4aa7bcb9b6-audit-policies\") pod \"oauth-openshift-68755f559b-nccsz\" (UID: \"590520ba-45e3-4cb1-9079-fd4aa7bcb9b6\") " pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.094140 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/590520ba-45e3-4cb1-9079-fd4aa7bcb9b6-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-68755f559b-nccsz\" (UID: \"590520ba-45e3-4cb1-9079-fd4aa7bcb9b6\") " pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.094234 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/590520ba-45e3-4cb1-9079-fd4aa7bcb9b6-v4-0-config-system-service-ca\") pod \"oauth-openshift-68755f559b-nccsz\" (UID: \"590520ba-45e3-4cb1-9079-fd4aa7bcb9b6\") " pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.094335 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/590520ba-45e3-4cb1-9079-fd4aa7bcb9b6-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-68755f559b-nccsz\" (UID: \"590520ba-45e3-4cb1-9079-fd4aa7bcb9b6\") " pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.094438 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/590520ba-45e3-4cb1-9079-fd4aa7bcb9b6-v4-0-config-system-session\") pod \"oauth-openshift-68755f559b-nccsz\" (UID: \"590520ba-45e3-4cb1-9079-fd4aa7bcb9b6\") " pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.094512 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/590520ba-45e3-4cb1-9079-fd4aa7bcb9b6-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-68755f559b-nccsz\" (UID: \"590520ba-45e3-4cb1-9079-fd4aa7bcb9b6\") " pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.136772 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.195562 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bqd4s\" (UniqueName: \"kubernetes.io/projected/590520ba-45e3-4cb1-9079-fd4aa7bcb9b6-kube-api-access-bqd4s\") pod \"oauth-openshift-68755f559b-nccsz\" (UID: \"590520ba-45e3-4cb1-9079-fd4aa7bcb9b6\") " pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.195635 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: 
\"kubernetes.io/secret/590520ba-45e3-4cb1-9079-fd4aa7bcb9b6-v4-0-config-user-template-login\") pod \"oauth-openshift-68755f559b-nccsz\" (UID: \"590520ba-45e3-4cb1-9079-fd4aa7bcb9b6\") " pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.195683 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/590520ba-45e3-4cb1-9079-fd4aa7bcb9b6-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-68755f559b-nccsz\" (UID: \"590520ba-45e3-4cb1-9079-fd4aa7bcb9b6\") " pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.195737 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/590520ba-45e3-4cb1-9079-fd4aa7bcb9b6-audit-policies\") pod \"oauth-openshift-68755f559b-nccsz\" (UID: \"590520ba-45e3-4cb1-9079-fd4aa7bcb9b6\") " pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.209812 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/590520ba-45e3-4cb1-9079-fd4aa7bcb9b6-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-68755f559b-nccsz\" (UID: \"590520ba-45e3-4cb1-9079-fd4aa7bcb9b6\") " pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.209926 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/590520ba-45e3-4cb1-9079-fd4aa7bcb9b6-v4-0-config-system-service-ca\") pod \"oauth-openshift-68755f559b-nccsz\" (UID: \"590520ba-45e3-4cb1-9079-fd4aa7bcb9b6\") " pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.210015 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/590520ba-45e3-4cb1-9079-fd4aa7bcb9b6-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-68755f559b-nccsz\" (UID: \"590520ba-45e3-4cb1-9079-fd4aa7bcb9b6\") " pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.210103 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/590520ba-45e3-4cb1-9079-fd4aa7bcb9b6-v4-0-config-system-session\") pod \"oauth-openshift-68755f559b-nccsz\" (UID: \"590520ba-45e3-4cb1-9079-fd4aa7bcb9b6\") " pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.210215 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/590520ba-45e3-4cb1-9079-fd4aa7bcb9b6-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-68755f559b-nccsz\" (UID: \"590520ba-45e3-4cb1-9079-fd4aa7bcb9b6\") " pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.210260 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" 
(UniqueName: \"kubernetes.io/configmap/590520ba-45e3-4cb1-9079-fd4aa7bcb9b6-v4-0-config-system-cliconfig\") pod \"oauth-openshift-68755f559b-nccsz\" (UID: \"590520ba-45e3-4cb1-9079-fd4aa7bcb9b6\") " pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.210293 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/590520ba-45e3-4cb1-9079-fd4aa7bcb9b6-audit-dir\") pod \"oauth-openshift-68755f559b-nccsz\" (UID: \"590520ba-45e3-4cb1-9079-fd4aa7bcb9b6\") " pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.210316 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/590520ba-45e3-4cb1-9079-fd4aa7bcb9b6-v4-0-config-system-serving-cert\") pod \"oauth-openshift-68755f559b-nccsz\" (UID: \"590520ba-45e3-4cb1-9079-fd4aa7bcb9b6\") " pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.210347 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/590520ba-45e3-4cb1-9079-fd4aa7bcb9b6-v4-0-config-system-router-certs\") pod \"oauth-openshift-68755f559b-nccsz\" (UID: \"590520ba-45e3-4cb1-9079-fd4aa7bcb9b6\") " pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.210378 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/590520ba-45e3-4cb1-9079-fd4aa7bcb9b6-v4-0-config-user-template-error\") pod \"oauth-openshift-68755f559b-nccsz\" (UID: \"590520ba-45e3-4cb1-9079-fd4aa7bcb9b6\") " pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.197517 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/590520ba-45e3-4cb1-9079-fd4aa7bcb9b6-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-68755f559b-nccsz\" (UID: \"590520ba-45e3-4cb1-9079-fd4aa7bcb9b6\") " pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.212428 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/590520ba-45e3-4cb1-9079-fd4aa7bcb9b6-audit-dir\") pod \"oauth-openshift-68755f559b-nccsz\" (UID: \"590520ba-45e3-4cb1-9079-fd4aa7bcb9b6\") " pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.206409 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/590520ba-45e3-4cb1-9079-fd4aa7bcb9b6-v4-0-config-user-template-login\") pod \"oauth-openshift-68755f559b-nccsz\" (UID: \"590520ba-45e3-4cb1-9079-fd4aa7bcb9b6\") " pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.213939 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/590520ba-45e3-4cb1-9079-fd4aa7bcb9b6-v4-0-config-system-service-ca\") pod 
\"oauth-openshift-68755f559b-nccsz\" (UID: \"590520ba-45e3-4cb1-9079-fd4aa7bcb9b6\") " pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.213971 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/590520ba-45e3-4cb1-9079-fd4aa7bcb9b6-v4-0-config-system-cliconfig\") pod \"oauth-openshift-68755f559b-nccsz\" (UID: \"590520ba-45e3-4cb1-9079-fd4aa7bcb9b6\") " pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.209607 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/590520ba-45e3-4cb1-9079-fd4aa7bcb9b6-audit-policies\") pod \"oauth-openshift-68755f559b-nccsz\" (UID: \"590520ba-45e3-4cb1-9079-fd4aa7bcb9b6\") " pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.215158 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/590520ba-45e3-4cb1-9079-fd4aa7bcb9b6-v4-0-config-user-template-error\") pod \"oauth-openshift-68755f559b-nccsz\" (UID: \"590520ba-45e3-4cb1-9079-fd4aa7bcb9b6\") " pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.215365 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/590520ba-45e3-4cb1-9079-fd4aa7bcb9b6-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-68755f559b-nccsz\" (UID: \"590520ba-45e3-4cb1-9079-fd4aa7bcb9b6\") " pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.216149 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/590520ba-45e3-4cb1-9079-fd4aa7bcb9b6-v4-0-config-system-session\") pod \"oauth-openshift-68755f559b-nccsz\" (UID: \"590520ba-45e3-4cb1-9079-fd4aa7bcb9b6\") " pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.217826 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/590520ba-45e3-4cb1-9079-fd4aa7bcb9b6-v4-0-config-system-router-certs\") pod \"oauth-openshift-68755f559b-nccsz\" (UID: \"590520ba-45e3-4cb1-9079-fd4aa7bcb9b6\") " pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.217931 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/590520ba-45e3-4cb1-9079-fd4aa7bcb9b6-v4-0-config-system-serving-cert\") pod \"oauth-openshift-68755f559b-nccsz\" (UID: \"590520ba-45e3-4cb1-9079-fd4aa7bcb9b6\") " pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.218473 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/590520ba-45e3-4cb1-9079-fd4aa7bcb9b6-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-68755f559b-nccsz\" (UID: \"590520ba-45e3-4cb1-9079-fd4aa7bcb9b6\") " 
pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.236007 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/590520ba-45e3-4cb1-9079-fd4aa7bcb9b6-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-68755f559b-nccsz\" (UID: \"590520ba-45e3-4cb1-9079-fd4aa7bcb9b6\") " pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.241780 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bqd4s\" (UniqueName: \"kubernetes.io/projected/590520ba-45e3-4cb1-9079-fd4aa7bcb9b6-kube-api-access-bqd4s\") pod \"oauth-openshift-68755f559b-nccsz\" (UID: \"590520ba-45e3-4cb1-9079-fd4aa7bcb9b6\") " pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.264409 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.270531 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.319634 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.454103 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.587799 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.588698 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.594779 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.614685 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.651151 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.712772 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.770957 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.774717 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.868847 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-68755f559b-nccsz"] Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.872262 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" 
Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.913202 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.953136 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" event={"ID":"590520ba-45e3-4cb1-9079-fd4aa7bcb9b6","Type":"ContainerStarted","Data":"74dd39885af954e37a51b28763b32223c24caacc9788628cc2adfc282c39a57a"} Jan 26 16:59:37 crc kubenswrapper[4865]: I0126 16:59:37.957289 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Jan 26 16:59:38 crc kubenswrapper[4865]: I0126 16:59:38.030338 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Jan 26 16:59:38 crc kubenswrapper[4865]: I0126 16:59:38.298509 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Jan 26 16:59:38 crc kubenswrapper[4865]: I0126 16:59:38.433473 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Jan 26 16:59:38 crc kubenswrapper[4865]: I0126 16:59:38.962302 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" event={"ID":"590520ba-45e3-4cb1-9079-fd4aa7bcb9b6","Type":"ContainerStarted","Data":"40555780c4ccf76e3b09dc1ae66c56b7ef16f4065218675c708cf7109f76a651"} Jan 26 16:59:38 crc kubenswrapper[4865]: I0126 16:59:38.962765 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" Jan 26 16:59:38 crc kubenswrapper[4865]: I0126 16:59:38.968771 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" Jan 26 16:59:38 crc kubenswrapper[4865]: I0126 16:59:38.996576 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-68755f559b-nccsz" podStartSLOduration=65.996530111 podStartE2EDuration="1m5.996530111s" podCreationTimestamp="2026-01-26 16:58:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 16:59:38.985638866 +0000 UTC m=+306.569524483" watchObservedRunningTime="2026-01-26 16:59:38.996530111 +0000 UTC m=+306.580415698" Jan 26 16:59:39 crc kubenswrapper[4865]: I0126 16:59:39.076923 4865 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Jan 26 16:59:39 crc kubenswrapper[4865]: I0126 16:59:39.346731 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Jan 26 16:59:39 crc kubenswrapper[4865]: I0126 16:59:39.857768 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 26 16:59:39 crc kubenswrapper[4865]: I0126 16:59:39.857903 4865 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 16:59:39 crc kubenswrapper[4865]: I0126 16:59:39.956433 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 26 16:59:39 crc kubenswrapper[4865]: I0126 16:59:39.956500 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 26 16:59:39 crc kubenswrapper[4865]: I0126 16:59:39.956574 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 26 16:59:39 crc kubenswrapper[4865]: I0126 16:59:39.956574 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 16:59:39 crc kubenswrapper[4865]: I0126 16:59:39.956598 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 26 16:59:39 crc kubenswrapper[4865]: I0126 16:59:39.956618 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 16:59:39 crc kubenswrapper[4865]: I0126 16:59:39.956624 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 26 16:59:39 crc kubenswrapper[4865]: I0126 16:59:39.956710 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 16:59:39 crc kubenswrapper[4865]: I0126 16:59:39.956822 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 16:59:39 crc kubenswrapper[4865]: I0126 16:59:39.957211 4865 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Jan 26 16:59:39 crc kubenswrapper[4865]: I0126 16:59:39.957245 4865 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Jan 26 16:59:39 crc kubenswrapper[4865]: I0126 16:59:39.957256 4865 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Jan 26 16:59:39 crc kubenswrapper[4865]: I0126 16:59:39.957269 4865 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 26 16:59:39 crc kubenswrapper[4865]: I0126 16:59:39.969480 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 16:59:39 crc kubenswrapper[4865]: I0126 16:59:39.974048 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 26 16:59:39 crc kubenswrapper[4865]: I0126 16:59:39.974095 4865 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="a0131cc4c4c0f4f31a777307e57e84744a9bc427c1965c4a18d074e4bca49b24" exitCode=137 Jan 26 16:59:39 crc kubenswrapper[4865]: I0126 16:59:39.974216 4865 scope.go:117] "RemoveContainer" containerID="a0131cc4c4c0f4f31a777307e57e84744a9bc427c1965c4a18d074e4bca49b24" Jan 26 16:59:39 crc kubenswrapper[4865]: I0126 16:59:39.974247 4865 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 16:59:40 crc kubenswrapper[4865]: I0126 16:59:40.011093 4865 scope.go:117] "RemoveContainer" containerID="a0131cc4c4c0f4f31a777307e57e84744a9bc427c1965c4a18d074e4bca49b24" Jan 26 16:59:40 crc kubenswrapper[4865]: E0126 16:59:40.011681 4865 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a0131cc4c4c0f4f31a777307e57e84744a9bc427c1965c4a18d074e4bca49b24\": container with ID starting with a0131cc4c4c0f4f31a777307e57e84744a9bc427c1965c4a18d074e4bca49b24 not found: ID does not exist" containerID="a0131cc4c4c0f4f31a777307e57e84744a9bc427c1965c4a18d074e4bca49b24" Jan 26 16:59:40 crc kubenswrapper[4865]: I0126 16:59:40.011725 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a0131cc4c4c0f4f31a777307e57e84744a9bc427c1965c4a18d074e4bca49b24"} err="failed to get container status \"a0131cc4c4c0f4f31a777307e57e84744a9bc427c1965c4a18d074e4bca49b24\": rpc error: code = NotFound desc = could not find container \"a0131cc4c4c0f4f31a777307e57e84744a9bc427c1965c4a18d074e4bca49b24\": container with ID starting with a0131cc4c4c0f4f31a777307e57e84744a9bc427c1965c4a18d074e4bca49b24 not found: ID does not exist" Jan 26 16:59:40 crc kubenswrapper[4865]: I0126 16:59:40.058642 4865 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 26 16:59:40 crc kubenswrapper[4865]: I0126 16:59:40.367912 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Jan 26 16:59:52 crc kubenswrapper[4865]: I0126 16:59:52.502294 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Jan 26 16:59:56 crc kubenswrapper[4865]: I0126 16:59:56.399878 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Jan 26 17:00:00 crc kubenswrapper[4865]: I0126 17:00:00.919393 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Jan 26 17:00:02 crc kubenswrapper[4865]: I0126 17:00:02.180159 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log" Jan 26 17:00:02 crc kubenswrapper[4865]: I0126 17:00:02.182023 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Jan 26 17:00:02 crc kubenswrapper[4865]: I0126 17:00:02.182115 4865 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="dd6ebfff92e3a1ad4c40d815f8117e5db792a90eb1d5759fce681d8310fd0015" exitCode=137 Jan 26 17:00:02 crc kubenswrapper[4865]: I0126 17:00:02.182164 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"dd6ebfff92e3a1ad4c40d815f8117e5db792a90eb1d5759fce681d8310fd0015"} Jan 26 17:00:02 crc kubenswrapper[4865]: I0126 17:00:02.182224 4865 scope.go:117] 
"RemoveContainer" containerID="504256c85edba518389bdf5fd47fcb43c583b784a202fde7975a25611f91ab90" Jan 26 17:00:02 crc kubenswrapper[4865]: I0126 17:00:02.195860 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Jan 26 17:00:03 crc kubenswrapper[4865]: I0126 17:00:03.190342 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log" Jan 26 17:00:03 crc kubenswrapper[4865]: I0126 17:00:03.192713 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"8adb554253a05d19dfb678479eae1fcba9c9eaf805cb6068dc8abbf511b8db5c"} Jan 26 17:00:03 crc kubenswrapper[4865]: I0126 17:00:03.560612 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Jan 26 17:00:03 crc kubenswrapper[4865]: I0126 17:00:03.618689 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Jan 26 17:00:08 crc kubenswrapper[4865]: I0126 17:00:08.355459 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Jan 26 17:00:11 crc kubenswrapper[4865]: I0126 17:00:11.631503 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 17:00:11 crc kubenswrapper[4865]: I0126 17:00:11.946101 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 17:00:11 crc kubenswrapper[4865]: I0126 17:00:11.954372 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 17:00:12 crc kubenswrapper[4865]: I0126 17:00:12.252081 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 17:00:22 crc kubenswrapper[4865]: I0126 17:00:22.486223 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29490780-2r9h8"] Jan 26 17:00:22 crc kubenswrapper[4865]: I0126 17:00:22.487619 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29490780-2r9h8" Jan 26 17:00:22 crc kubenswrapper[4865]: I0126 17:00:22.489292 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 26 17:00:22 crc kubenswrapper[4865]: I0126 17:00:22.489742 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 26 17:00:22 crc kubenswrapper[4865]: I0126 17:00:22.503162 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-gfbfj"] Jan 26 17:00:22 crc kubenswrapper[4865]: I0126 17:00:22.517987 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29490780-2r9h8"] Jan 26 17:00:22 crc kubenswrapper[4865]: I0126 17:00:22.595028 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ab4b68d1-57e5-46f4-987f-309500566c82-secret-volume\") pod \"collect-profiles-29490780-2r9h8\" (UID: \"ab4b68d1-57e5-46f4-987f-309500566c82\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490780-2r9h8" Jan 26 17:00:22 crc kubenswrapper[4865]: I0126 17:00:22.595136 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ab4b68d1-57e5-46f4-987f-309500566c82-config-volume\") pod \"collect-profiles-29490780-2r9h8\" (UID: \"ab4b68d1-57e5-46f4-987f-309500566c82\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490780-2r9h8" Jan 26 17:00:22 crc kubenswrapper[4865]: I0126 17:00:22.595173 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bvld5\" (UniqueName: \"kubernetes.io/projected/ab4b68d1-57e5-46f4-987f-309500566c82-kube-api-access-bvld5\") pod \"collect-profiles-29490780-2r9h8\" (UID: \"ab4b68d1-57e5-46f4-987f-309500566c82\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490780-2r9h8" Jan 26 17:00:22 crc kubenswrapper[4865]: I0126 17:00:22.596416 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6xswb"] Jan 26 17:00:22 crc kubenswrapper[4865]: I0126 17:00:22.596742 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6xswb" podUID="8731191c-a1c3-4422-bcac-e6e6cfec649f" containerName="route-controller-manager" containerID="cri-o://befd37012960e11b2be875011dec9ed58a1d42a2a8b7d0bb483ec83452c1edfa" gracePeriod=30 Jan 26 17:00:22 crc kubenswrapper[4865]: I0126 17:00:22.604633 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-nknkn"] Jan 26 17:00:22 crc kubenswrapper[4865]: I0126 17:00:22.605116 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-nknkn" podUID="3452d3f1-d080-412d-a1da-0dc0d3776d5f" containerName="controller-manager" containerID="cri-o://68eb6663e8bbb67558745b6d2b8dcfde20b98c9205297d3a9edf34b4e5cee2d5" gracePeriod=30 Jan 26 17:00:22 crc kubenswrapper[4865]: I0126 17:00:22.696726 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-volume\" (UniqueName: \"kubernetes.io/configmap/ab4b68d1-57e5-46f4-987f-309500566c82-config-volume\") pod \"collect-profiles-29490780-2r9h8\" (UID: \"ab4b68d1-57e5-46f4-987f-309500566c82\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490780-2r9h8" Jan 26 17:00:22 crc kubenswrapper[4865]: I0126 17:00:22.696801 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bvld5\" (UniqueName: \"kubernetes.io/projected/ab4b68d1-57e5-46f4-987f-309500566c82-kube-api-access-bvld5\") pod \"collect-profiles-29490780-2r9h8\" (UID: \"ab4b68d1-57e5-46f4-987f-309500566c82\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490780-2r9h8" Jan 26 17:00:22 crc kubenswrapper[4865]: I0126 17:00:22.696894 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ab4b68d1-57e5-46f4-987f-309500566c82-secret-volume\") pod \"collect-profiles-29490780-2r9h8\" (UID: \"ab4b68d1-57e5-46f4-987f-309500566c82\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490780-2r9h8" Jan 26 17:00:22 crc kubenswrapper[4865]: I0126 17:00:22.698067 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ab4b68d1-57e5-46f4-987f-309500566c82-config-volume\") pod \"collect-profiles-29490780-2r9h8\" (UID: \"ab4b68d1-57e5-46f4-987f-309500566c82\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490780-2r9h8" Jan 26 17:00:22 crc kubenswrapper[4865]: I0126 17:00:22.705425 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ab4b68d1-57e5-46f4-987f-309500566c82-secret-volume\") pod \"collect-profiles-29490780-2r9h8\" (UID: \"ab4b68d1-57e5-46f4-987f-309500566c82\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490780-2r9h8" Jan 26 17:00:22 crc kubenswrapper[4865]: I0126 17:00:22.716513 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bvld5\" (UniqueName: \"kubernetes.io/projected/ab4b68d1-57e5-46f4-987f-309500566c82-kube-api-access-bvld5\") pod \"collect-profiles-29490780-2r9h8\" (UID: \"ab4b68d1-57e5-46f4-987f-309500566c82\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490780-2r9h8" Jan 26 17:00:22 crc kubenswrapper[4865]: I0126 17:00:22.807257 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29490780-2r9h8" Jan 26 17:00:22 crc kubenswrapper[4865]: I0126 17:00:22.988777 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6xswb" Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.051573 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29490780-2r9h8"] Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.056912 4865 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-nknkn" Jan 26 17:00:23 crc kubenswrapper[4865]: W0126 17:00:23.057248 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podab4b68d1_57e5_46f4_987f_309500566c82.slice/crio-ea4204eaf9f886b8588b9ec52ad52b36a62d306a321b2a5fa951bcba11dc1eb8 WatchSource:0}: Error finding container ea4204eaf9f886b8588b9ec52ad52b36a62d306a321b2a5fa951bcba11dc1eb8: Status 404 returned error can't find the container with id ea4204eaf9f886b8588b9ec52ad52b36a62d306a321b2a5fa951bcba11dc1eb8 Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.104738 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8731191c-a1c3-4422-bcac-e6e6cfec649f-config\") pod \"8731191c-a1c3-4422-bcac-e6e6cfec649f\" (UID: \"8731191c-a1c3-4422-bcac-e6e6cfec649f\") " Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.104813 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8731191c-a1c3-4422-bcac-e6e6cfec649f-client-ca\") pod \"8731191c-a1c3-4422-bcac-e6e6cfec649f\" (UID: \"8731191c-a1c3-4422-bcac-e6e6cfec649f\") " Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.104850 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8731191c-a1c3-4422-bcac-e6e6cfec649f-serving-cert\") pod \"8731191c-a1c3-4422-bcac-e6e6cfec649f\" (UID: \"8731191c-a1c3-4422-bcac-e6e6cfec649f\") " Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.104879 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6pq68\" (UniqueName: \"kubernetes.io/projected/8731191c-a1c3-4422-bcac-e6e6cfec649f-kube-api-access-6pq68\") pod \"8731191c-a1c3-4422-bcac-e6e6cfec649f\" (UID: \"8731191c-a1c3-4422-bcac-e6e6cfec649f\") " Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.105832 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8731191c-a1c3-4422-bcac-e6e6cfec649f-client-ca" (OuterVolumeSpecName: "client-ca") pod "8731191c-a1c3-4422-bcac-e6e6cfec649f" (UID: "8731191c-a1c3-4422-bcac-e6e6cfec649f"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.106145 4865 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8731191c-a1c3-4422-bcac-e6e6cfec649f-client-ca\") on node \"crc\" DevicePath \"\"" Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.106461 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8731191c-a1c3-4422-bcac-e6e6cfec649f-config" (OuterVolumeSpecName: "config") pod "8731191c-a1c3-4422-bcac-e6e6cfec649f" (UID: "8731191c-a1c3-4422-bcac-e6e6cfec649f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.110409 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8731191c-a1c3-4422-bcac-e6e6cfec649f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8731191c-a1c3-4422-bcac-e6e6cfec649f" (UID: "8731191c-a1c3-4422-bcac-e6e6cfec649f"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.111789 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8731191c-a1c3-4422-bcac-e6e6cfec649f-kube-api-access-6pq68" (OuterVolumeSpecName: "kube-api-access-6pq68") pod "8731191c-a1c3-4422-bcac-e6e6cfec649f" (UID: "8731191c-a1c3-4422-bcac-e6e6cfec649f"). InnerVolumeSpecName "kube-api-access-6pq68". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.206910 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3452d3f1-d080-412d-a1da-0dc0d3776d5f-client-ca\") pod \"3452d3f1-d080-412d-a1da-0dc0d3776d5f\" (UID: \"3452d3f1-d080-412d-a1da-0dc0d3776d5f\") " Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.207234 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3452d3f1-d080-412d-a1da-0dc0d3776d5f-proxy-ca-bundles\") pod \"3452d3f1-d080-412d-a1da-0dc0d3776d5f\" (UID: \"3452d3f1-d080-412d-a1da-0dc0d3776d5f\") " Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.207275 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3452d3f1-d080-412d-a1da-0dc0d3776d5f-serving-cert\") pod \"3452d3f1-d080-412d-a1da-0dc0d3776d5f\" (UID: \"3452d3f1-d080-412d-a1da-0dc0d3776d5f\") " Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.207336 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wcrms\" (UniqueName: \"kubernetes.io/projected/3452d3f1-d080-412d-a1da-0dc0d3776d5f-kube-api-access-wcrms\") pod \"3452d3f1-d080-412d-a1da-0dc0d3776d5f\" (UID: \"3452d3f1-d080-412d-a1da-0dc0d3776d5f\") " Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.207417 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3452d3f1-d080-412d-a1da-0dc0d3776d5f-config\") pod \"3452d3f1-d080-412d-a1da-0dc0d3776d5f\" (UID: \"3452d3f1-d080-412d-a1da-0dc0d3776d5f\") " Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.207631 4865 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8731191c-a1c3-4422-bcac-e6e6cfec649f-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.207642 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6pq68\" (UniqueName: \"kubernetes.io/projected/8731191c-a1c3-4422-bcac-e6e6cfec649f-kube-api-access-6pq68\") on node \"crc\" DevicePath \"\"" Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.207652 4865 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8731191c-a1c3-4422-bcac-e6e6cfec649f-config\") on node \"crc\" DevicePath \"\"" Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.207727 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3452d3f1-d080-412d-a1da-0dc0d3776d5f-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "3452d3f1-d080-412d-a1da-0dc0d3776d5f" (UID: "3452d3f1-d080-412d-a1da-0dc0d3776d5f"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.207757 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3452d3f1-d080-412d-a1da-0dc0d3776d5f-client-ca" (OuterVolumeSpecName: "client-ca") pod "3452d3f1-d080-412d-a1da-0dc0d3776d5f" (UID: "3452d3f1-d080-412d-a1da-0dc0d3776d5f"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.208288 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3452d3f1-d080-412d-a1da-0dc0d3776d5f-config" (OuterVolumeSpecName: "config") pod "3452d3f1-d080-412d-a1da-0dc0d3776d5f" (UID: "3452d3f1-d080-412d-a1da-0dc0d3776d5f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.211240 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3452d3f1-d080-412d-a1da-0dc0d3776d5f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "3452d3f1-d080-412d-a1da-0dc0d3776d5f" (UID: "3452d3f1-d080-412d-a1da-0dc0d3776d5f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.211255 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3452d3f1-d080-412d-a1da-0dc0d3776d5f-kube-api-access-wcrms" (OuterVolumeSpecName: "kube-api-access-wcrms") pod "3452d3f1-d080-412d-a1da-0dc0d3776d5f" (UID: "3452d3f1-d080-412d-a1da-0dc0d3776d5f"). InnerVolumeSpecName "kube-api-access-wcrms". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.308671 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wcrms\" (UniqueName: \"kubernetes.io/projected/3452d3f1-d080-412d-a1da-0dc0d3776d5f-kube-api-access-wcrms\") on node \"crc\" DevicePath \"\"" Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.308718 4865 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3452d3f1-d080-412d-a1da-0dc0d3776d5f-config\") on node \"crc\" DevicePath \"\"" Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.308730 4865 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/3452d3f1-d080-412d-a1da-0dc0d3776d5f-client-ca\") on node \"crc\" DevicePath \"\"" Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.308743 4865 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/3452d3f1-d080-412d-a1da-0dc0d3776d5f-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.308754 4865 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3452d3f1-d080-412d-a1da-0dc0d3776d5f-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.321205 4865 generic.go:334] "Generic (PLEG): container finished" podID="3452d3f1-d080-412d-a1da-0dc0d3776d5f" containerID="68eb6663e8bbb67558745b6d2b8dcfde20b98c9205297d3a9edf34b4e5cee2d5" exitCode=0 Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.321261 4865 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-nknkn" Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.321297 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-nknkn" event={"ID":"3452d3f1-d080-412d-a1da-0dc0d3776d5f","Type":"ContainerDied","Data":"68eb6663e8bbb67558745b6d2b8dcfde20b98c9205297d3a9edf34b4e5cee2d5"} Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.321328 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-nknkn" event={"ID":"3452d3f1-d080-412d-a1da-0dc0d3776d5f","Type":"ContainerDied","Data":"84df4ea757b9d176cc74612c702effa97b11248e7d73714c966a9c1228ca50c5"} Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.321348 4865 scope.go:117] "RemoveContainer" containerID="68eb6663e8bbb67558745b6d2b8dcfde20b98c9205297d3a9edf34b4e5cee2d5" Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.323742 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29490780-2r9h8" event={"ID":"ab4b68d1-57e5-46f4-987f-309500566c82","Type":"ContainerStarted","Data":"218744c7c97b965e2d74d84a843b2b580cc79670796c0b5ee6fe1da02dea8ef5"} Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.323778 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29490780-2r9h8" event={"ID":"ab4b68d1-57e5-46f4-987f-309500566c82","Type":"ContainerStarted","Data":"ea4204eaf9f886b8588b9ec52ad52b36a62d306a321b2a5fa951bcba11dc1eb8"} Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.329380 4865 generic.go:334] "Generic (PLEG): container finished" podID="8731191c-a1c3-4422-bcac-e6e6cfec649f" containerID="befd37012960e11b2be875011dec9ed58a1d42a2a8b7d0bb483ec83452c1edfa" exitCode=0 Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.329426 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6xswb" event={"ID":"8731191c-a1c3-4422-bcac-e6e6cfec649f","Type":"ContainerDied","Data":"befd37012960e11b2be875011dec9ed58a1d42a2a8b7d0bb483ec83452c1edfa"} Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.329451 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6xswb" event={"ID":"8731191c-a1c3-4422-bcac-e6e6cfec649f","Type":"ContainerDied","Data":"60509dbd7fe35764fe639c159f5687be95ef5be5b1d76e630aab4e49f76d0ada"} Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.329449 4865 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6xswb" Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.342466 4865 scope.go:117] "RemoveContainer" containerID="68eb6663e8bbb67558745b6d2b8dcfde20b98c9205297d3a9edf34b4e5cee2d5" Jan 26 17:00:23 crc kubenswrapper[4865]: E0126 17:00:23.342950 4865 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"68eb6663e8bbb67558745b6d2b8dcfde20b98c9205297d3a9edf34b4e5cee2d5\": container with ID starting with 68eb6663e8bbb67558745b6d2b8dcfde20b98c9205297d3a9edf34b4e5cee2d5 not found: ID does not exist" containerID="68eb6663e8bbb67558745b6d2b8dcfde20b98c9205297d3a9edf34b4e5cee2d5" Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.342983 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"68eb6663e8bbb67558745b6d2b8dcfde20b98c9205297d3a9edf34b4e5cee2d5"} err="failed to get container status \"68eb6663e8bbb67558745b6d2b8dcfde20b98c9205297d3a9edf34b4e5cee2d5\": rpc error: code = NotFound desc = could not find container \"68eb6663e8bbb67558745b6d2b8dcfde20b98c9205297d3a9edf34b4e5cee2d5\": container with ID starting with 68eb6663e8bbb67558745b6d2b8dcfde20b98c9205297d3a9edf34b4e5cee2d5 not found: ID does not exist" Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.343027 4865 scope.go:117] "RemoveContainer" containerID="befd37012960e11b2be875011dec9ed58a1d42a2a8b7d0bb483ec83452c1edfa" Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.365497 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29490780-2r9h8" podStartSLOduration=1.365477726 podStartE2EDuration="1.365477726s" podCreationTimestamp="2026-01-26 17:00:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 17:00:23.362219899 +0000 UTC m=+350.946105496" watchObservedRunningTime="2026-01-26 17:00:23.365477726 +0000 UTC m=+350.949363313" Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.384431 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-nknkn"] Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.388873 4865 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-nknkn"] Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.397490 4865 scope.go:117] "RemoveContainer" containerID="befd37012960e11b2be875011dec9ed58a1d42a2a8b7d0bb483ec83452c1edfa" Jan 26 17:00:23 crc kubenswrapper[4865]: E0126 17:00:23.401172 4865 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"befd37012960e11b2be875011dec9ed58a1d42a2a8b7d0bb483ec83452c1edfa\": container with ID starting with befd37012960e11b2be875011dec9ed58a1d42a2a8b7d0bb483ec83452c1edfa not found: ID does not exist" containerID="befd37012960e11b2be875011dec9ed58a1d42a2a8b7d0bb483ec83452c1edfa" Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.401230 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"befd37012960e11b2be875011dec9ed58a1d42a2a8b7d0bb483ec83452c1edfa"} err="failed to get container status \"befd37012960e11b2be875011dec9ed58a1d42a2a8b7d0bb483ec83452c1edfa\": rpc error: code = NotFound desc = could not find container 
\"befd37012960e11b2be875011dec9ed58a1d42a2a8b7d0bb483ec83452c1edfa\": container with ID starting with befd37012960e11b2be875011dec9ed58a1d42a2a8b7d0bb483ec83452c1edfa not found: ID does not exist" Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.412303 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6xswb"] Jan 26 17:00:23 crc kubenswrapper[4865]: I0126 17:00:23.419631 4865 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6xswb"] Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.037695 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6b5549788c-qw5fs"] Jan 26 17:00:24 crc kubenswrapper[4865]: E0126 17:00:24.037977 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3452d3f1-d080-412d-a1da-0dc0d3776d5f" containerName="controller-manager" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.037996 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="3452d3f1-d080-412d-a1da-0dc0d3776d5f" containerName="controller-manager" Jan 26 17:00:24 crc kubenswrapper[4865]: E0126 17:00:24.038019 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8731191c-a1c3-4422-bcac-e6e6cfec649f" containerName="route-controller-manager" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.038074 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="8731191c-a1c3-4422-bcac-e6e6cfec649f" containerName="route-controller-manager" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.038198 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="8731191c-a1c3-4422-bcac-e6e6cfec649f" containerName="route-controller-manager" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.038220 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="3452d3f1-d080-412d-a1da-0dc0d3776d5f" containerName="controller-manager" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.038649 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6b5549788c-qw5fs" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.041504 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7bdc655bf5-8gwtt"] Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.042168 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7bdc655bf5-8gwtt" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.042538 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.043425 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.044460 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.046217 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.046352 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.046665 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.046916 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.047154 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.047282 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.047407 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.047915 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.050615 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.058440 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.062693 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7bdc655bf5-8gwtt"] Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.066176 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6b5549788c-qw5fs"] Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.137914 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/85195773-149b-4dcc-a3cc-3d7c5f063e1d-proxy-ca-bundles\") pod \"controller-manager-7bdc655bf5-8gwtt\" (UID: \"85195773-149b-4dcc-a3cc-3d7c5f063e1d\") " pod="openshift-controller-manager/controller-manager-7bdc655bf5-8gwtt" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.138011 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/d5184966-efe6-4c65-a6a3-fb982bb28bdb-config\") pod \"route-controller-manager-6b5549788c-qw5fs\" (UID: \"d5184966-efe6-4c65-a6a3-fb982bb28bdb\") " pod="openshift-route-controller-manager/route-controller-manager-6b5549788c-qw5fs" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.138046 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d5184966-efe6-4c65-a6a3-fb982bb28bdb-client-ca\") pod \"route-controller-manager-6b5549788c-qw5fs\" (UID: \"d5184966-efe6-4c65-a6a3-fb982bb28bdb\") " pod="openshift-route-controller-manager/route-controller-manager-6b5549788c-qw5fs" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.138080 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d5184966-efe6-4c65-a6a3-fb982bb28bdb-serving-cert\") pod \"route-controller-manager-6b5549788c-qw5fs\" (UID: \"d5184966-efe6-4c65-a6a3-fb982bb28bdb\") " pod="openshift-route-controller-manager/route-controller-manager-6b5549788c-qw5fs" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.138110 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dz95n\" (UniqueName: \"kubernetes.io/projected/d5184966-efe6-4c65-a6a3-fb982bb28bdb-kube-api-access-dz95n\") pod \"route-controller-manager-6b5549788c-qw5fs\" (UID: \"d5184966-efe6-4c65-a6a3-fb982bb28bdb\") " pod="openshift-route-controller-manager/route-controller-manager-6b5549788c-qw5fs" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.138153 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/85195773-149b-4dcc-a3cc-3d7c5f063e1d-client-ca\") pod \"controller-manager-7bdc655bf5-8gwtt\" (UID: \"85195773-149b-4dcc-a3cc-3d7c5f063e1d\") " pod="openshift-controller-manager/controller-manager-7bdc655bf5-8gwtt" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.138183 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85195773-149b-4dcc-a3cc-3d7c5f063e1d-config\") pod \"controller-manager-7bdc655bf5-8gwtt\" (UID: \"85195773-149b-4dcc-a3cc-3d7c5f063e1d\") " pod="openshift-controller-manager/controller-manager-7bdc655bf5-8gwtt" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.138268 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wnb7x\" (UniqueName: \"kubernetes.io/projected/85195773-149b-4dcc-a3cc-3d7c5f063e1d-kube-api-access-wnb7x\") pod \"controller-manager-7bdc655bf5-8gwtt\" (UID: \"85195773-149b-4dcc-a3cc-3d7c5f063e1d\") " pod="openshift-controller-manager/controller-manager-7bdc655bf5-8gwtt" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.138300 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/85195773-149b-4dcc-a3cc-3d7c5f063e1d-serving-cert\") pod \"controller-manager-7bdc655bf5-8gwtt\" (UID: \"85195773-149b-4dcc-a3cc-3d7c5f063e1d\") " pod="openshift-controller-manager/controller-manager-7bdc655bf5-8gwtt" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.239041 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-wnb7x\" (UniqueName: \"kubernetes.io/projected/85195773-149b-4dcc-a3cc-3d7c5f063e1d-kube-api-access-wnb7x\") pod \"controller-manager-7bdc655bf5-8gwtt\" (UID: \"85195773-149b-4dcc-a3cc-3d7c5f063e1d\") " pod="openshift-controller-manager/controller-manager-7bdc655bf5-8gwtt" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.239141 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/85195773-149b-4dcc-a3cc-3d7c5f063e1d-serving-cert\") pod \"controller-manager-7bdc655bf5-8gwtt\" (UID: \"85195773-149b-4dcc-a3cc-3d7c5f063e1d\") " pod="openshift-controller-manager/controller-manager-7bdc655bf5-8gwtt" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.239178 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/85195773-149b-4dcc-a3cc-3d7c5f063e1d-proxy-ca-bundles\") pod \"controller-manager-7bdc655bf5-8gwtt\" (UID: \"85195773-149b-4dcc-a3cc-3d7c5f063e1d\") " pod="openshift-controller-manager/controller-manager-7bdc655bf5-8gwtt" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.240827 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d5184966-efe6-4c65-a6a3-fb982bb28bdb-config\") pod \"route-controller-manager-6b5549788c-qw5fs\" (UID: \"d5184966-efe6-4c65-a6a3-fb982bb28bdb\") " pod="openshift-route-controller-manager/route-controller-manager-6b5549788c-qw5fs" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.240942 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d5184966-efe6-4c65-a6a3-fb982bb28bdb-client-ca\") pod \"route-controller-manager-6b5549788c-qw5fs\" (UID: \"d5184966-efe6-4c65-a6a3-fb982bb28bdb\") " pod="openshift-route-controller-manager/route-controller-manager-6b5549788c-qw5fs" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.240981 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d5184966-efe6-4c65-a6a3-fb982bb28bdb-serving-cert\") pod \"route-controller-manager-6b5549788c-qw5fs\" (UID: \"d5184966-efe6-4c65-a6a3-fb982bb28bdb\") " pod="openshift-route-controller-manager/route-controller-manager-6b5549788c-qw5fs" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.241047 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dz95n\" (UniqueName: \"kubernetes.io/projected/d5184966-efe6-4c65-a6a3-fb982bb28bdb-kube-api-access-dz95n\") pod \"route-controller-manager-6b5549788c-qw5fs\" (UID: \"d5184966-efe6-4c65-a6a3-fb982bb28bdb\") " pod="openshift-route-controller-manager/route-controller-manager-6b5549788c-qw5fs" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.241097 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/85195773-149b-4dcc-a3cc-3d7c5f063e1d-client-ca\") pod \"controller-manager-7bdc655bf5-8gwtt\" (UID: \"85195773-149b-4dcc-a3cc-3d7c5f063e1d\") " pod="openshift-controller-manager/controller-manager-7bdc655bf5-8gwtt" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.241120 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85195773-149b-4dcc-a3cc-3d7c5f063e1d-config\") pod 
\"controller-manager-7bdc655bf5-8gwtt\" (UID: \"85195773-149b-4dcc-a3cc-3d7c5f063e1d\") " pod="openshift-controller-manager/controller-manager-7bdc655bf5-8gwtt" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.241609 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/85195773-149b-4dcc-a3cc-3d7c5f063e1d-proxy-ca-bundles\") pod \"controller-manager-7bdc655bf5-8gwtt\" (UID: \"85195773-149b-4dcc-a3cc-3d7c5f063e1d\") " pod="openshift-controller-manager/controller-manager-7bdc655bf5-8gwtt" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.242439 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d5184966-efe6-4c65-a6a3-fb982bb28bdb-client-ca\") pod \"route-controller-manager-6b5549788c-qw5fs\" (UID: \"d5184966-efe6-4c65-a6a3-fb982bb28bdb\") " pod="openshift-route-controller-manager/route-controller-manager-6b5549788c-qw5fs" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.242596 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d5184966-efe6-4c65-a6a3-fb982bb28bdb-config\") pod \"route-controller-manager-6b5549788c-qw5fs\" (UID: \"d5184966-efe6-4c65-a6a3-fb982bb28bdb\") " pod="openshift-route-controller-manager/route-controller-manager-6b5549788c-qw5fs" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.244221 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/85195773-149b-4dcc-a3cc-3d7c5f063e1d-client-ca\") pod \"controller-manager-7bdc655bf5-8gwtt\" (UID: \"85195773-149b-4dcc-a3cc-3d7c5f063e1d\") " pod="openshift-controller-manager/controller-manager-7bdc655bf5-8gwtt" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.244616 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85195773-149b-4dcc-a3cc-3d7c5f063e1d-config\") pod \"controller-manager-7bdc655bf5-8gwtt\" (UID: \"85195773-149b-4dcc-a3cc-3d7c5f063e1d\") " pod="openshift-controller-manager/controller-manager-7bdc655bf5-8gwtt" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.246357 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/85195773-149b-4dcc-a3cc-3d7c5f063e1d-serving-cert\") pod \"controller-manager-7bdc655bf5-8gwtt\" (UID: \"85195773-149b-4dcc-a3cc-3d7c5f063e1d\") " pod="openshift-controller-manager/controller-manager-7bdc655bf5-8gwtt" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.247910 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d5184966-efe6-4c65-a6a3-fb982bb28bdb-serving-cert\") pod \"route-controller-manager-6b5549788c-qw5fs\" (UID: \"d5184966-efe6-4c65-a6a3-fb982bb28bdb\") " pod="openshift-route-controller-manager/route-controller-manager-6b5549788c-qw5fs" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.261441 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wnb7x\" (UniqueName: \"kubernetes.io/projected/85195773-149b-4dcc-a3cc-3d7c5f063e1d-kube-api-access-wnb7x\") pod \"controller-manager-7bdc655bf5-8gwtt\" (UID: \"85195773-149b-4dcc-a3cc-3d7c5f063e1d\") " pod="openshift-controller-manager/controller-manager-7bdc655bf5-8gwtt" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.265604 4865 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dz95n\" (UniqueName: \"kubernetes.io/projected/d5184966-efe6-4c65-a6a3-fb982bb28bdb-kube-api-access-dz95n\") pod \"route-controller-manager-6b5549788c-qw5fs\" (UID: \"d5184966-efe6-4c65-a6a3-fb982bb28bdb\") " pod="openshift-route-controller-manager/route-controller-manager-6b5549788c-qw5fs" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.350658 4865 generic.go:334] "Generic (PLEG): container finished" podID="ab4b68d1-57e5-46f4-987f-309500566c82" containerID="218744c7c97b965e2d74d84a843b2b580cc79670796c0b5ee6fe1da02dea8ef5" exitCode=0 Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.351114 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29490780-2r9h8" event={"ID":"ab4b68d1-57e5-46f4-987f-309500566c82","Type":"ContainerDied","Data":"218744c7c97b965e2d74d84a843b2b580cc79670796c0b5ee6fe1da02dea8ef5"} Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.355806 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6b5549788c-qw5fs" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.369552 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3452d3f1-d080-412d-a1da-0dc0d3776d5f" path="/var/lib/kubelet/pods/3452d3f1-d080-412d-a1da-0dc0d3776d5f/volumes" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.370595 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8731191c-a1c3-4422-bcac-e6e6cfec649f" path="/var/lib/kubelet/pods/8731191c-a1c3-4422-bcac-e6e6cfec649f/volumes" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.377712 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7bdc655bf5-8gwtt" Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.585424 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7bdc655bf5-8gwtt"] Jan 26 17:00:24 crc kubenswrapper[4865]: W0126 17:00:24.597926 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod85195773_149b_4dcc_a3cc_3d7c5f063e1d.slice/crio-43daefb74a0e4b92f3c56548fcf2103acd89f6651d89a080998a705a15e91502 WatchSource:0}: Error finding container 43daefb74a0e4b92f3c56548fcf2103acd89f6651d89a080998a705a15e91502: Status 404 returned error can't find the container with id 43daefb74a0e4b92f3c56548fcf2103acd89f6651d89a080998a705a15e91502 Jan 26 17:00:24 crc kubenswrapper[4865]: I0126 17:00:24.627824 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6b5549788c-qw5fs"] Jan 26 17:00:24 crc kubenswrapper[4865]: W0126 17:00:24.632784 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd5184966_efe6_4c65_a6a3_fb982bb28bdb.slice/crio-f956e98ced680370698527bdbdd6b896737ef1ca682b0115accdc63125ecd103 WatchSource:0}: Error finding container f956e98ced680370698527bdbdd6b896737ef1ca682b0115accdc63125ecd103: Status 404 returned error can't find the container with id f956e98ced680370698527bdbdd6b896737ef1ca682b0115accdc63125ecd103 Jan 26 17:00:25 crc kubenswrapper[4865]: I0126 17:00:25.369852 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6b5549788c-qw5fs" event={"ID":"d5184966-efe6-4c65-a6a3-fb982bb28bdb","Type":"ContainerStarted","Data":"43db41ebfcce6451953b7412483ab15964f7c00b709c7f0d55d4c184983ae7d9"} Jan 26 17:00:25 crc kubenswrapper[4865]: I0126 17:00:25.369911 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6b5549788c-qw5fs" event={"ID":"d5184966-efe6-4c65-a6a3-fb982bb28bdb","Type":"ContainerStarted","Data":"f956e98ced680370698527bdbdd6b896737ef1ca682b0115accdc63125ecd103"} Jan 26 17:00:25 crc kubenswrapper[4865]: I0126 17:00:25.370248 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6b5549788c-qw5fs" Jan 26 17:00:25 crc kubenswrapper[4865]: I0126 17:00:25.372696 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7bdc655bf5-8gwtt" event={"ID":"85195773-149b-4dcc-a3cc-3d7c5f063e1d","Type":"ContainerStarted","Data":"313901d12b4ee1fa0351e017156b97ddad74bdba716258e1a362577a3f752362"} Jan 26 17:00:25 crc kubenswrapper[4865]: I0126 17:00:25.372771 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7bdc655bf5-8gwtt" event={"ID":"85195773-149b-4dcc-a3cc-3d7c5f063e1d","Type":"ContainerStarted","Data":"43daefb74a0e4b92f3c56548fcf2103acd89f6651d89a080998a705a15e91502"} Jan 26 17:00:25 crc kubenswrapper[4865]: I0126 17:00:25.422259 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7bdc655bf5-8gwtt" podStartSLOduration=3.422031146 podStartE2EDuration="3.422031146s" podCreationTimestamp="2026-01-26 17:00:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 17:00:25.421442948 +0000 UTC m=+353.005328535" watchObservedRunningTime="2026-01-26 17:00:25.422031146 +0000 UTC m=+353.005916733" Jan 26 17:00:25 crc kubenswrapper[4865]: I0126 17:00:25.424094 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6b5549788c-qw5fs" podStartSLOduration=3.424082067 podStartE2EDuration="3.424082067s" podCreationTimestamp="2026-01-26 17:00:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 17:00:25.396665069 +0000 UTC m=+352.980550656" watchObservedRunningTime="2026-01-26 17:00:25.424082067 +0000 UTC m=+353.007967654" Jan 26 17:00:25 crc kubenswrapper[4865]: I0126 17:00:25.643467 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29490780-2r9h8" Jan 26 17:00:25 crc kubenswrapper[4865]: I0126 17:00:25.645765 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6b5549788c-qw5fs" Jan 26 17:00:25 crc kubenswrapper[4865]: I0126 17:00:25.774041 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ab4b68d1-57e5-46f4-987f-309500566c82-config-volume\") pod \"ab4b68d1-57e5-46f4-987f-309500566c82\" (UID: \"ab4b68d1-57e5-46f4-987f-309500566c82\") " Jan 26 17:00:25 crc kubenswrapper[4865]: I0126 17:00:25.774179 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ab4b68d1-57e5-46f4-987f-309500566c82-secret-volume\") pod \"ab4b68d1-57e5-46f4-987f-309500566c82\" (UID: \"ab4b68d1-57e5-46f4-987f-309500566c82\") " Jan 26 17:00:25 crc kubenswrapper[4865]: I0126 17:00:25.774236 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bvld5\" (UniqueName: \"kubernetes.io/projected/ab4b68d1-57e5-46f4-987f-309500566c82-kube-api-access-bvld5\") pod \"ab4b68d1-57e5-46f4-987f-309500566c82\" (UID: \"ab4b68d1-57e5-46f4-987f-309500566c82\") " Jan 26 17:00:25 crc kubenswrapper[4865]: I0126 17:00:25.774733 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab4b68d1-57e5-46f4-987f-309500566c82-config-volume" (OuterVolumeSpecName: "config-volume") pod "ab4b68d1-57e5-46f4-987f-309500566c82" (UID: "ab4b68d1-57e5-46f4-987f-309500566c82"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 17:00:25 crc kubenswrapper[4865]: I0126 17:00:25.779788 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab4b68d1-57e5-46f4-987f-309500566c82-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "ab4b68d1-57e5-46f4-987f-309500566c82" (UID: "ab4b68d1-57e5-46f4-987f-309500566c82"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 17:00:25 crc kubenswrapper[4865]: I0126 17:00:25.779969 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab4b68d1-57e5-46f4-987f-309500566c82-kube-api-access-bvld5" (OuterVolumeSpecName: "kube-api-access-bvld5") pod "ab4b68d1-57e5-46f4-987f-309500566c82" (UID: "ab4b68d1-57e5-46f4-987f-309500566c82"). InnerVolumeSpecName "kube-api-access-bvld5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 17:00:25 crc kubenswrapper[4865]: I0126 17:00:25.875985 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bvld5\" (UniqueName: \"kubernetes.io/projected/ab4b68d1-57e5-46f4-987f-309500566c82-kube-api-access-bvld5\") on node \"crc\" DevicePath \"\"" Jan 26 17:00:25 crc kubenswrapper[4865]: I0126 17:00:25.876962 4865 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ab4b68d1-57e5-46f4-987f-309500566c82-config-volume\") on node \"crc\" DevicePath \"\"" Jan 26 17:00:25 crc kubenswrapper[4865]: I0126 17:00:25.876976 4865 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ab4b68d1-57e5-46f4-987f-309500566c82-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 26 17:00:26 crc kubenswrapper[4865]: I0126 17:00:26.380868 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29490780-2r9h8" event={"ID":"ab4b68d1-57e5-46f4-987f-309500566c82","Type":"ContainerDied","Data":"ea4204eaf9f886b8588b9ec52ad52b36a62d306a321b2a5fa951bcba11dc1eb8"} Jan 26 17:00:26 crc kubenswrapper[4865]: I0126 17:00:26.380915 4865 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ea4204eaf9f886b8588b9ec52ad52b36a62d306a321b2a5fa951bcba11dc1eb8" Jan 26 17:00:26 crc kubenswrapper[4865]: I0126 17:00:26.380968 4865 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29490780-2r9h8" Jan 26 17:00:26 crc kubenswrapper[4865]: I0126 17:00:26.381223 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7bdc655bf5-8gwtt" Jan 26 17:00:26 crc kubenswrapper[4865]: I0126 17:00:26.387120 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-7bdc655bf5-8gwtt" Jan 26 17:00:32 crc kubenswrapper[4865]: I0126 17:00:32.762451 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fkmhm"] Jan 26 17:00:32 crc kubenswrapper[4865]: I0126 17:00:32.770892 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-fkmhm" podUID="ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf" containerName="registry-server" containerID="cri-o://1ac6269cf415b0490fe6323374b8d8fce3bee37d104fe02f4eef033f92e9dbdd" gracePeriod=2 Jan 26 17:00:33 crc kubenswrapper[4865]: I0126 17:00:33.423937 4865 generic.go:334] "Generic (PLEG): container finished" podID="ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf" containerID="1ac6269cf415b0490fe6323374b8d8fce3bee37d104fe02f4eef033f92e9dbdd" exitCode=0 Jan 26 17:00:33 crc kubenswrapper[4865]: I0126 17:00:33.424038 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fkmhm" event={"ID":"ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf","Type":"ContainerDied","Data":"1ac6269cf415b0490fe6323374b8d8fce3bee37d104fe02f4eef033f92e9dbdd"} Jan 26 17:00:33 crc kubenswrapper[4865]: I0126 17:00:33.768526 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fkmhm" Jan 26 17:00:33 crc kubenswrapper[4865]: I0126 17:00:33.894614 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf-utilities\") pod \"ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf\" (UID: \"ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf\") " Jan 26 17:00:33 crc kubenswrapper[4865]: I0126 17:00:33.894682 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf-catalog-content\") pod \"ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf\" (UID: \"ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf\") " Jan 26 17:00:33 crc kubenswrapper[4865]: I0126 17:00:33.894767 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7nhjm\" (UniqueName: \"kubernetes.io/projected/ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf-kube-api-access-7nhjm\") pod \"ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf\" (UID: \"ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf\") " Jan 26 17:00:33 crc kubenswrapper[4865]: I0126 17:00:33.895564 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf-utilities" (OuterVolumeSpecName: "utilities") pod "ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf" (UID: "ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 17:00:33 crc kubenswrapper[4865]: I0126 17:00:33.901241 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf-kube-api-access-7nhjm" (OuterVolumeSpecName: "kube-api-access-7nhjm") pod "ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf" (UID: "ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf"). InnerVolumeSpecName "kube-api-access-7nhjm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 17:00:33 crc kubenswrapper[4865]: I0126 17:00:33.941590 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf" (UID: "ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 17:00:33 crc kubenswrapper[4865]: I0126 17:00:33.995813 4865 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 17:00:33 crc kubenswrapper[4865]: I0126 17:00:33.997872 4865 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 17:00:33 crc kubenswrapper[4865]: I0126 17:00:33.997895 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7nhjm\" (UniqueName: \"kubernetes.io/projected/ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf-kube-api-access-7nhjm\") on node \"crc\" DevicePath \"\"" Jan 26 17:00:34 crc kubenswrapper[4865]: I0126 17:00:34.433312 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fkmhm" event={"ID":"ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf","Type":"ContainerDied","Data":"b77351e4d4d08b7aa8f664a2981356f5e5876ab56a06be20d0b5541d20d177b3"} Jan 26 17:00:34 crc kubenswrapper[4865]: I0126 17:00:34.433375 4865 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-fkmhm" Jan 26 17:00:34 crc kubenswrapper[4865]: I0126 17:00:34.433886 4865 scope.go:117] "RemoveContainer" containerID="1ac6269cf415b0490fe6323374b8d8fce3bee37d104fe02f4eef033f92e9dbdd" Jan 26 17:00:34 crc kubenswrapper[4865]: I0126 17:00:34.457493 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fkmhm"] Jan 26 17:00:34 crc kubenswrapper[4865]: I0126 17:00:34.459806 4865 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-fkmhm"] Jan 26 17:00:34 crc kubenswrapper[4865]: I0126 17:00:34.459902 4865 scope.go:117] "RemoveContainer" containerID="d17f97b8b34a236431f2df8eaf3aba2e41f97dc9b1512fa84044f3fd8da2d3b3" Jan 26 17:00:34 crc kubenswrapper[4865]: I0126 17:00:34.488237 4865 scope.go:117] "RemoveContainer" containerID="4255e619441f82281bf484b7eaf5f429412ee79230e309dda58d4113c9c084e1" Jan 26 17:00:34 crc kubenswrapper[4865]: I0126 17:00:34.512410 4865 patch_prober.go:28] interesting pod/machine-config-daemon-q8cb9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 17:00:34 crc kubenswrapper[4865]: I0126 17:00:34.512583 4865 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 17:00:34 crc kubenswrapper[4865]: I0126 17:00:34.556308 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6vslt"] Jan 26 17:00:34 crc kubenswrapper[4865]: I0126 17:00:34.556772 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-6vslt" podUID="203f4245-9105-442f-a78b-dc354926516b" containerName="registry-server" containerID="cri-o://be165c1391dbc93939ad27f324aa6081cbd8614e3bff602b46159fa9f189bb76" gracePeriod=2 Jan 26 17:00:35 crc kubenswrapper[4865]: I0126 17:00:35.016848 4865 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6vslt" Jan 26 17:00:35 crc kubenswrapper[4865]: I0126 17:00:35.111186 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pk767\" (UniqueName: \"kubernetes.io/projected/203f4245-9105-442f-a78b-dc354926516b-kube-api-access-pk767\") pod \"203f4245-9105-442f-a78b-dc354926516b\" (UID: \"203f4245-9105-442f-a78b-dc354926516b\") " Jan 26 17:00:35 crc kubenswrapper[4865]: I0126 17:00:35.111362 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/203f4245-9105-442f-a78b-dc354926516b-catalog-content\") pod \"203f4245-9105-442f-a78b-dc354926516b\" (UID: \"203f4245-9105-442f-a78b-dc354926516b\") " Jan 26 17:00:35 crc kubenswrapper[4865]: I0126 17:00:35.111410 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/203f4245-9105-442f-a78b-dc354926516b-utilities\") pod \"203f4245-9105-442f-a78b-dc354926516b\" (UID: \"203f4245-9105-442f-a78b-dc354926516b\") " Jan 26 17:00:35 crc kubenswrapper[4865]: I0126 17:00:35.112222 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/203f4245-9105-442f-a78b-dc354926516b-utilities" (OuterVolumeSpecName: "utilities") pod "203f4245-9105-442f-a78b-dc354926516b" (UID: "203f4245-9105-442f-a78b-dc354926516b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 17:00:35 crc kubenswrapper[4865]: I0126 17:00:35.124756 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/203f4245-9105-442f-a78b-dc354926516b-kube-api-access-pk767" (OuterVolumeSpecName: "kube-api-access-pk767") pod "203f4245-9105-442f-a78b-dc354926516b" (UID: "203f4245-9105-442f-a78b-dc354926516b"). InnerVolumeSpecName "kube-api-access-pk767". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 17:00:35 crc kubenswrapper[4865]: I0126 17:00:35.158734 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pg69s"] Jan 26 17:00:35 crc kubenswrapper[4865]: I0126 17:00:35.159920 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-pg69s" podUID="e402709d-4ea5-403d-af16-3ab178841b35" containerName="registry-server" containerID="cri-o://31f2dc7237b175955b2b9bdc7f325ece04182095f775ce4031678e45e333d8fe" gracePeriod=2 Jan 26 17:00:35 crc kubenswrapper[4865]: I0126 17:00:35.169415 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/203f4245-9105-442f-a78b-dc354926516b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "203f4245-9105-442f-a78b-dc354926516b" (UID: "203f4245-9105-442f-a78b-dc354926516b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 17:00:35 crc kubenswrapper[4865]: I0126 17:00:35.213077 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pk767\" (UniqueName: \"kubernetes.io/projected/203f4245-9105-442f-a78b-dc354926516b-kube-api-access-pk767\") on node \"crc\" DevicePath \"\"" Jan 26 17:00:35 crc kubenswrapper[4865]: I0126 17:00:35.213109 4865 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/203f4245-9105-442f-a78b-dc354926516b-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 17:00:35 crc kubenswrapper[4865]: I0126 17:00:35.213120 4865 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/203f4245-9105-442f-a78b-dc354926516b-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 17:00:35 crc kubenswrapper[4865]: I0126 17:00:35.440832 4865 generic.go:334] "Generic (PLEG): container finished" podID="e402709d-4ea5-403d-af16-3ab178841b35" containerID="31f2dc7237b175955b2b9bdc7f325ece04182095f775ce4031678e45e333d8fe" exitCode=0 Jan 26 17:00:35 crc kubenswrapper[4865]: I0126 17:00:35.440886 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pg69s" event={"ID":"e402709d-4ea5-403d-af16-3ab178841b35","Type":"ContainerDied","Data":"31f2dc7237b175955b2b9bdc7f325ece04182095f775ce4031678e45e333d8fe"} Jan 26 17:00:35 crc kubenswrapper[4865]: I0126 17:00:35.443288 4865 generic.go:334] "Generic (PLEG): container finished" podID="203f4245-9105-442f-a78b-dc354926516b" containerID="be165c1391dbc93939ad27f324aa6081cbd8614e3bff602b46159fa9f189bb76" exitCode=0 Jan 26 17:00:35 crc kubenswrapper[4865]: I0126 17:00:35.443380 4865 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6vslt" Jan 26 17:00:35 crc kubenswrapper[4865]: I0126 17:00:35.443366 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6vslt" event={"ID":"203f4245-9105-442f-a78b-dc354926516b","Type":"ContainerDied","Data":"be165c1391dbc93939ad27f324aa6081cbd8614e3bff602b46159fa9f189bb76"} Jan 26 17:00:35 crc kubenswrapper[4865]: I0126 17:00:35.443506 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6vslt" event={"ID":"203f4245-9105-442f-a78b-dc354926516b","Type":"ContainerDied","Data":"79ff78f6a9302f566f01c172407c2668e21ae1c10e1c6c064a11ba6922703273"} Jan 26 17:00:35 crc kubenswrapper[4865]: I0126 17:00:35.443545 4865 scope.go:117] "RemoveContainer" containerID="be165c1391dbc93939ad27f324aa6081cbd8614e3bff602b46159fa9f189bb76" Jan 26 17:00:35 crc kubenswrapper[4865]: I0126 17:00:35.464397 4865 scope.go:117] "RemoveContainer" containerID="90e15f16a7be5dfce2dbe99bca17ee42b13a35cde2d3441e422b202866806f12" Jan 26 17:00:35 crc kubenswrapper[4865]: I0126 17:00:35.475556 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6vslt"] Jan 26 17:00:35 crc kubenswrapper[4865]: I0126 17:00:35.479986 4865 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-6vslt"] Jan 26 17:00:35 crc kubenswrapper[4865]: I0126 17:00:35.497079 4865 scope.go:117] "RemoveContainer" containerID="f9ebd85955f9ca9c17a6a17b309772381a8976daa567d9337af20b8b0d984cf6" Jan 26 17:00:35 crc kubenswrapper[4865]: I0126 17:00:35.512821 4865 scope.go:117] "RemoveContainer" containerID="be165c1391dbc93939ad27f324aa6081cbd8614e3bff602b46159fa9f189bb76" Jan 26 17:00:35 crc kubenswrapper[4865]: E0126 17:00:35.513227 4865 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"be165c1391dbc93939ad27f324aa6081cbd8614e3bff602b46159fa9f189bb76\": container with ID starting with be165c1391dbc93939ad27f324aa6081cbd8614e3bff602b46159fa9f189bb76 not found: ID does not exist" containerID="be165c1391dbc93939ad27f324aa6081cbd8614e3bff602b46159fa9f189bb76" Jan 26 17:00:35 crc kubenswrapper[4865]: I0126 17:00:35.513323 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be165c1391dbc93939ad27f324aa6081cbd8614e3bff602b46159fa9f189bb76"} err="failed to get container status \"be165c1391dbc93939ad27f324aa6081cbd8614e3bff602b46159fa9f189bb76\": rpc error: code = NotFound desc = could not find container \"be165c1391dbc93939ad27f324aa6081cbd8614e3bff602b46159fa9f189bb76\": container with ID starting with be165c1391dbc93939ad27f324aa6081cbd8614e3bff602b46159fa9f189bb76 not found: ID does not exist" Jan 26 17:00:35 crc kubenswrapper[4865]: I0126 17:00:35.513434 4865 scope.go:117] "RemoveContainer" containerID="90e15f16a7be5dfce2dbe99bca17ee42b13a35cde2d3441e422b202866806f12" Jan 26 17:00:35 crc kubenswrapper[4865]: E0126 17:00:35.513767 4865 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"90e15f16a7be5dfce2dbe99bca17ee42b13a35cde2d3441e422b202866806f12\": container with ID starting with 90e15f16a7be5dfce2dbe99bca17ee42b13a35cde2d3441e422b202866806f12 not found: ID does not exist" containerID="90e15f16a7be5dfce2dbe99bca17ee42b13a35cde2d3441e422b202866806f12" Jan 26 17:00:35 crc kubenswrapper[4865]: I0126 17:00:35.513813 4865 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90e15f16a7be5dfce2dbe99bca17ee42b13a35cde2d3441e422b202866806f12"} err="failed to get container status \"90e15f16a7be5dfce2dbe99bca17ee42b13a35cde2d3441e422b202866806f12\": rpc error: code = NotFound desc = could not find container \"90e15f16a7be5dfce2dbe99bca17ee42b13a35cde2d3441e422b202866806f12\": container with ID starting with 90e15f16a7be5dfce2dbe99bca17ee42b13a35cde2d3441e422b202866806f12 not found: ID does not exist" Jan 26 17:00:35 crc kubenswrapper[4865]: I0126 17:00:35.513844 4865 scope.go:117] "RemoveContainer" containerID="f9ebd85955f9ca9c17a6a17b309772381a8976daa567d9337af20b8b0d984cf6" Jan 26 17:00:35 crc kubenswrapper[4865]: E0126 17:00:35.514124 4865 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f9ebd85955f9ca9c17a6a17b309772381a8976daa567d9337af20b8b0d984cf6\": container with ID starting with f9ebd85955f9ca9c17a6a17b309772381a8976daa567d9337af20b8b0d984cf6 not found: ID does not exist" containerID="f9ebd85955f9ca9c17a6a17b309772381a8976daa567d9337af20b8b0d984cf6" Jan 26 17:00:35 crc kubenswrapper[4865]: I0126 17:00:35.514148 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9ebd85955f9ca9c17a6a17b309772381a8976daa567d9337af20b8b0d984cf6"} err="failed to get container status \"f9ebd85955f9ca9c17a6a17b309772381a8976daa567d9337af20b8b0d984cf6\": rpc error: code = NotFound desc = could not find container \"f9ebd85955f9ca9c17a6a17b309772381a8976daa567d9337af20b8b0d984cf6\": container with ID starting with f9ebd85955f9ca9c17a6a17b309772381a8976daa567d9337af20b8b0d984cf6 not found: ID does not exist" Jan 26 17:00:36 crc kubenswrapper[4865]: I0126 17:00:36.218907 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pg69s" Jan 26 17:00:36 crc kubenswrapper[4865]: I0126 17:00:36.346465 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rchs4\" (UniqueName: \"kubernetes.io/projected/e402709d-4ea5-403d-af16-3ab178841b35-kube-api-access-rchs4\") pod \"e402709d-4ea5-403d-af16-3ab178841b35\" (UID: \"e402709d-4ea5-403d-af16-3ab178841b35\") " Jan 26 17:00:36 crc kubenswrapper[4865]: I0126 17:00:36.346727 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e402709d-4ea5-403d-af16-3ab178841b35-catalog-content\") pod \"e402709d-4ea5-403d-af16-3ab178841b35\" (UID: \"e402709d-4ea5-403d-af16-3ab178841b35\") " Jan 26 17:00:36 crc kubenswrapper[4865]: I0126 17:00:36.346812 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e402709d-4ea5-403d-af16-3ab178841b35-utilities\") pod \"e402709d-4ea5-403d-af16-3ab178841b35\" (UID: \"e402709d-4ea5-403d-af16-3ab178841b35\") " Jan 26 17:00:36 crc kubenswrapper[4865]: I0126 17:00:36.348081 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e402709d-4ea5-403d-af16-3ab178841b35-utilities" (OuterVolumeSpecName: "utilities") pod "e402709d-4ea5-403d-af16-3ab178841b35" (UID: "e402709d-4ea5-403d-af16-3ab178841b35"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 17:00:36 crc kubenswrapper[4865]: I0126 17:00:36.351739 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e402709d-4ea5-403d-af16-3ab178841b35-kube-api-access-rchs4" (OuterVolumeSpecName: "kube-api-access-rchs4") pod "e402709d-4ea5-403d-af16-3ab178841b35" (UID: "e402709d-4ea5-403d-af16-3ab178841b35"). InnerVolumeSpecName "kube-api-access-rchs4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 17:00:36 crc kubenswrapper[4865]: I0126 17:00:36.364872 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="203f4245-9105-442f-a78b-dc354926516b" path="/var/lib/kubelet/pods/203f4245-9105-442f-a78b-dc354926516b/volumes" Jan 26 17:00:36 crc kubenswrapper[4865]: I0126 17:00:36.365890 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf" path="/var/lib/kubelet/pods/ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf/volumes" Jan 26 17:00:36 crc kubenswrapper[4865]: I0126 17:00:36.447776 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rchs4\" (UniqueName: \"kubernetes.io/projected/e402709d-4ea5-403d-af16-3ab178841b35-kube-api-access-rchs4\") on node \"crc\" DevicePath \"\"" Jan 26 17:00:36 crc kubenswrapper[4865]: I0126 17:00:36.448147 4865 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e402709d-4ea5-403d-af16-3ab178841b35-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 17:00:36 crc kubenswrapper[4865]: I0126 17:00:36.452229 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pg69s" Jan 26 17:00:36 crc kubenswrapper[4865]: I0126 17:00:36.452199 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pg69s" event={"ID":"e402709d-4ea5-403d-af16-3ab178841b35","Type":"ContainerDied","Data":"6590cef2a7f1cb1544986c097b1e906db70623745c1fa3226f89378c47bc3c18"} Jan 26 17:00:36 crc kubenswrapper[4865]: I0126 17:00:36.452571 4865 scope.go:117] "RemoveContainer" containerID="31f2dc7237b175955b2b9bdc7f325ece04182095f775ce4031678e45e333d8fe" Jan 26 17:00:36 crc kubenswrapper[4865]: I0126 17:00:36.459816 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e402709d-4ea5-403d-af16-3ab178841b35-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e402709d-4ea5-403d-af16-3ab178841b35" (UID: "e402709d-4ea5-403d-af16-3ab178841b35"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 17:00:36 crc kubenswrapper[4865]: I0126 17:00:36.479730 4865 scope.go:117] "RemoveContainer" containerID="1fdc1af6f42220ef6f9fe4dcab666d0787bb7069b7272e868ab095b1d0c393df" Jan 26 17:00:36 crc kubenswrapper[4865]: I0126 17:00:36.496403 4865 scope.go:117] "RemoveContainer" containerID="506dc6f0514fc4a08d184451ec692c5c72ce381e34c1ff3b27e3b7afcfcba658" Jan 26 17:00:36 crc kubenswrapper[4865]: I0126 17:00:36.549109 4865 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e402709d-4ea5-403d-af16-3ab178841b35-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 17:00:36 crc kubenswrapper[4865]: I0126 17:00:36.804318 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pg69s"] Jan 26 17:00:36 crc kubenswrapper[4865]: I0126 17:00:36.809329 4865 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-pg69s"] Jan 26 17:00:37 crc kubenswrapper[4865]: I0126 17:00:37.934405 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gw6ld"] Jan 26 17:00:37 crc kubenswrapper[4865]: I0126 17:00:37.935289 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-gw6ld" podUID="1fa30c32-b5b9-48db-baf7-761da95213f7" containerName="registry-server" containerID="cri-o://e3e0e69295103de391636c38ede4250e4e109f56322501446ce035b5c809b31a" gracePeriod=30 Jan 26 17:00:37 crc kubenswrapper[4865]: I0126 17:00:37.955361 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7x4jl"] Jan 26 17:00:37 crc kubenswrapper[4865]: I0126 17:00:37.955771 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-7x4jl" podUID="28d973df-adc6-41b0-81b9-afd3a743641f" containerName="registry-server" containerID="cri-o://2f319958091fc0d9103daaf13741d642c5e7458be0a2050797d55e55e5f02b1e" gracePeriod=30 Jan 26 17:00:37 crc kubenswrapper[4865]: I0126 17:00:37.967571 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-phb7b"] Jan 26 17:00:37 crc kubenswrapper[4865]: I0126 17:00:37.967924 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-phb7b" podUID="ce5bf85d-dd65-4174-8fdc-10988986d665" containerName="marketplace-operator" containerID="cri-o://51ca7c97b44cf70fd6b579979b4622ce13160e317e74d5ad33e83a7b0bf15983" gracePeriod=30 Jan 26 17:00:37 crc kubenswrapper[4865]: I0126 17:00:37.976082 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-r9jxm"] Jan 26 17:00:37 crc kubenswrapper[4865]: I0126 17:00:37.978009 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-r9jxm" podUID="7337688e-9bf5-436c-be77-afad7fa093ed" containerName="registry-server" containerID="cri-o://d84edf2c216e585f647c74ee23cc9cf7d2e6450ac0a20757d2ffd2ff48519a17" gracePeriod=30 Jan 26 17:00:37 crc kubenswrapper[4865]: I0126 17:00:37.984213 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hxg9k"] Jan 26 17:00:37 crc kubenswrapper[4865]: I0126 17:00:37.984571 4865 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-marketplace/redhat-operators-hxg9k" podUID="ea398908-abb3-4ce8-bbaf-a44b6350314d" containerName="registry-server" containerID="cri-o://e5a20880dba9ecd775bb5301de21ed37c5f3d94a131e9cc629e031387ca9ca47" gracePeriod=30 Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.002074 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-9c8ld"] Jan 26 17:00:38 crc kubenswrapper[4865]: E0126 17:00:38.002642 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e402709d-4ea5-403d-af16-3ab178841b35" containerName="extract-content" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.002751 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="e402709d-4ea5-403d-af16-3ab178841b35" containerName="extract-content" Jan 26 17:00:38 crc kubenswrapper[4865]: E0126 17:00:38.002843 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="203f4245-9105-442f-a78b-dc354926516b" containerName="extract-utilities" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.003020 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="203f4245-9105-442f-a78b-dc354926516b" containerName="extract-utilities" Jan 26 17:00:38 crc kubenswrapper[4865]: E0126 17:00:38.003087 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e402709d-4ea5-403d-af16-3ab178841b35" containerName="extract-utilities" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.003152 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="e402709d-4ea5-403d-af16-3ab178841b35" containerName="extract-utilities" Jan 26 17:00:38 crc kubenswrapper[4865]: E0126 17:00:38.003216 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf" containerName="extract-content" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.003270 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf" containerName="extract-content" Jan 26 17:00:38 crc kubenswrapper[4865]: E0126 17:00:38.003338 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e402709d-4ea5-403d-af16-3ab178841b35" containerName="registry-server" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.003401 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="e402709d-4ea5-403d-af16-3ab178841b35" containerName="registry-server" Jan 26 17:00:38 crc kubenswrapper[4865]: E0126 17:00:38.003525 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf" containerName="extract-utilities" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.003604 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf" containerName="extract-utilities" Jan 26 17:00:38 crc kubenswrapper[4865]: E0126 17:00:38.003684 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="203f4245-9105-442f-a78b-dc354926516b" containerName="extract-content" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.003753 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="203f4245-9105-442f-a78b-dc354926516b" containerName="extract-content" Jan 26 17:00:38 crc kubenswrapper[4865]: E0126 17:00:38.003816 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab4b68d1-57e5-46f4-987f-309500566c82" containerName="collect-profiles" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.003874 4865 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="ab4b68d1-57e5-46f4-987f-309500566c82" containerName="collect-profiles" Jan 26 17:00:38 crc kubenswrapper[4865]: E0126 17:00:38.003937 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="203f4245-9105-442f-a78b-dc354926516b" containerName="registry-server" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.004014 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="203f4245-9105-442f-a78b-dc354926516b" containerName="registry-server" Jan 26 17:00:38 crc kubenswrapper[4865]: E0126 17:00:38.004087 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf" containerName="registry-server" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.004145 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf" containerName="registry-server" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.004315 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="e402709d-4ea5-403d-af16-3ab178841b35" containerName="registry-server" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.004380 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab4b68d1-57e5-46f4-987f-309500566c82" containerName="collect-profiles" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.004444 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebe88f8f-985c-4ceb-8cbe-8a9bcc3ee0cf" containerName="registry-server" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.004501 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="203f4245-9105-442f-a78b-dc354926516b" containerName="registry-server" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.005049 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-9c8ld" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.007404 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-9c8ld"] Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.173490 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/50bc8e04-9801-4c19-9e84-ee8ea7c0cad4-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-9c8ld\" (UID: \"50bc8e04-9801-4c19-9e84-ee8ea7c0cad4\") " pod="openshift-marketplace/marketplace-operator-79b997595-9c8ld" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.173859 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/50bc8e04-9801-4c19-9e84-ee8ea7c0cad4-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-9c8ld\" (UID: \"50bc8e04-9801-4c19-9e84-ee8ea7c0cad4\") " pod="openshift-marketplace/marketplace-operator-79b997595-9c8ld" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.173880 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xqbjn\" (UniqueName: \"kubernetes.io/projected/50bc8e04-9801-4c19-9e84-ee8ea7c0cad4-kube-api-access-xqbjn\") pod \"marketplace-operator-79b997595-9c8ld\" (UID: \"50bc8e04-9801-4c19-9e84-ee8ea7c0cad4\") " pod="openshift-marketplace/marketplace-operator-79b997595-9c8ld" Jan 26 17:00:38 crc kubenswrapper[4865]: E0126 17:00:38.256267 4865 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e5a20880dba9ecd775bb5301de21ed37c5f3d94a131e9cc629e031387ca9ca47 is running failed: container process not found" containerID="e5a20880dba9ecd775bb5301de21ed37c5f3d94a131e9cc629e031387ca9ca47" cmd=["grpc_health_probe","-addr=:50051"] Jan 26 17:00:38 crc kubenswrapper[4865]: E0126 17:00:38.256774 4865 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e5a20880dba9ecd775bb5301de21ed37c5f3d94a131e9cc629e031387ca9ca47 is running failed: container process not found" containerID="e5a20880dba9ecd775bb5301de21ed37c5f3d94a131e9cc629e031387ca9ca47" cmd=["grpc_health_probe","-addr=:50051"] Jan 26 17:00:38 crc kubenswrapper[4865]: E0126 17:00:38.257127 4865 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e5a20880dba9ecd775bb5301de21ed37c5f3d94a131e9cc629e031387ca9ca47 is running failed: container process not found" containerID="e5a20880dba9ecd775bb5301de21ed37c5f3d94a131e9cc629e031387ca9ca47" cmd=["grpc_health_probe","-addr=:50051"] Jan 26 17:00:38 crc kubenswrapper[4865]: E0126 17:00:38.257227 4865 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e5a20880dba9ecd775bb5301de21ed37c5f3d94a131e9cc629e031387ca9ca47 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-operators-hxg9k" podUID="ea398908-abb3-4ce8-bbaf-a44b6350314d" containerName="registry-server" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.275673 4865 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/50bc8e04-9801-4c19-9e84-ee8ea7c0cad4-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-9c8ld\" (UID: \"50bc8e04-9801-4c19-9e84-ee8ea7c0cad4\") " pod="openshift-marketplace/marketplace-operator-79b997595-9c8ld" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.275726 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xqbjn\" (UniqueName: \"kubernetes.io/projected/50bc8e04-9801-4c19-9e84-ee8ea7c0cad4-kube-api-access-xqbjn\") pod \"marketplace-operator-79b997595-9c8ld\" (UID: \"50bc8e04-9801-4c19-9e84-ee8ea7c0cad4\") " pod="openshift-marketplace/marketplace-operator-79b997595-9c8ld" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.275814 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/50bc8e04-9801-4c19-9e84-ee8ea7c0cad4-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-9c8ld\" (UID: \"50bc8e04-9801-4c19-9e84-ee8ea7c0cad4\") " pod="openshift-marketplace/marketplace-operator-79b997595-9c8ld" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.277228 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/50bc8e04-9801-4c19-9e84-ee8ea7c0cad4-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-9c8ld\" (UID: \"50bc8e04-9801-4c19-9e84-ee8ea7c0cad4\") " pod="openshift-marketplace/marketplace-operator-79b997595-9c8ld" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.286151 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/50bc8e04-9801-4c19-9e84-ee8ea7c0cad4-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-9c8ld\" (UID: \"50bc8e04-9801-4c19-9e84-ee8ea7c0cad4\") " pod="openshift-marketplace/marketplace-operator-79b997595-9c8ld" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.306262 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xqbjn\" (UniqueName: \"kubernetes.io/projected/50bc8e04-9801-4c19-9e84-ee8ea7c0cad4-kube-api-access-xqbjn\") pod \"marketplace-operator-79b997595-9c8ld\" (UID: \"50bc8e04-9801-4c19-9e84-ee8ea7c0cad4\") " pod="openshift-marketplace/marketplace-operator-79b997595-9c8ld" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.365051 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e402709d-4ea5-403d-af16-3ab178841b35" path="/var/lib/kubelet/pods/e402709d-4ea5-403d-af16-3ab178841b35/volumes" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.407297 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-9c8ld" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.478048 4865 generic.go:334] "Generic (PLEG): container finished" podID="7337688e-9bf5-436c-be77-afad7fa093ed" containerID="d84edf2c216e585f647c74ee23cc9cf7d2e6450ac0a20757d2ffd2ff48519a17" exitCode=0 Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.478120 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r9jxm" event={"ID":"7337688e-9bf5-436c-be77-afad7fa093ed","Type":"ContainerDied","Data":"d84edf2c216e585f647c74ee23cc9cf7d2e6450ac0a20757d2ffd2ff48519a17"} Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.485912 4865 generic.go:334] "Generic (PLEG): container finished" podID="1fa30c32-b5b9-48db-baf7-761da95213f7" containerID="e3e0e69295103de391636c38ede4250e4e109f56322501446ce035b5c809b31a" exitCode=0 Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.486071 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gw6ld" event={"ID":"1fa30c32-b5b9-48db-baf7-761da95213f7","Type":"ContainerDied","Data":"e3e0e69295103de391636c38ede4250e4e109f56322501446ce035b5c809b31a"} Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.487651 4865 generic.go:334] "Generic (PLEG): container finished" podID="ce5bf85d-dd65-4174-8fdc-10988986d665" containerID="51ca7c97b44cf70fd6b579979b4622ce13160e317e74d5ad33e83a7b0bf15983" exitCode=0 Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.487751 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-phb7b" event={"ID":"ce5bf85d-dd65-4174-8fdc-10988986d665","Type":"ContainerDied","Data":"51ca7c97b44cf70fd6b579979b4622ce13160e317e74d5ad33e83a7b0bf15983"} Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.502366 4865 generic.go:334] "Generic (PLEG): container finished" podID="28d973df-adc6-41b0-81b9-afd3a743641f" containerID="2f319958091fc0d9103daaf13741d642c5e7458be0a2050797d55e55e5f02b1e" exitCode=0 Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.502451 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7x4jl" event={"ID":"28d973df-adc6-41b0-81b9-afd3a743641f","Type":"ContainerDied","Data":"2f319958091fc0d9103daaf13741d642c5e7458be0a2050797d55e55e5f02b1e"} Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.517209 4865 generic.go:334] "Generic (PLEG): container finished" podID="ea398908-abb3-4ce8-bbaf-a44b6350314d" containerID="e5a20880dba9ecd775bb5301de21ed37c5f3d94a131e9cc629e031387ca9ca47" exitCode=0 Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.517277 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hxg9k" event={"ID":"ea398908-abb3-4ce8-bbaf-a44b6350314d","Type":"ContainerDied","Data":"e5a20880dba9ecd775bb5301de21ed37c5f3d94a131e9cc629e031387ca9ca47"} Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.518968 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gw6ld" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.650883 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hxg9k" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.664911 4865 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-7x4jl" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.667463 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-phb7b" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.681036 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1fa30c32-b5b9-48db-baf7-761da95213f7-utilities\") pod \"1fa30c32-b5b9-48db-baf7-761da95213f7\" (UID: \"1fa30c32-b5b9-48db-baf7-761da95213f7\") " Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.681118 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-56lwr\" (UniqueName: \"kubernetes.io/projected/1fa30c32-b5b9-48db-baf7-761da95213f7-kube-api-access-56lwr\") pod \"1fa30c32-b5b9-48db-baf7-761da95213f7\" (UID: \"1fa30c32-b5b9-48db-baf7-761da95213f7\") " Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.681156 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1fa30c32-b5b9-48db-baf7-761da95213f7-catalog-content\") pod \"1fa30c32-b5b9-48db-baf7-761da95213f7\" (UID: \"1fa30c32-b5b9-48db-baf7-761da95213f7\") " Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.683409 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1fa30c32-b5b9-48db-baf7-761da95213f7-utilities" (OuterVolumeSpecName: "utilities") pod "1fa30c32-b5b9-48db-baf7-761da95213f7" (UID: "1fa30c32-b5b9-48db-baf7-761da95213f7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.684900 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-r9jxm" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.687934 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1fa30c32-b5b9-48db-baf7-761da95213f7-kube-api-access-56lwr" (OuterVolumeSpecName: "kube-api-access-56lwr") pod "1fa30c32-b5b9-48db-baf7-761da95213f7" (UID: "1fa30c32-b5b9-48db-baf7-761da95213f7"). InnerVolumeSpecName "kube-api-access-56lwr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.728941 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1fa30c32-b5b9-48db-baf7-761da95213f7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1fa30c32-b5b9-48db-baf7-761da95213f7" (UID: "1fa30c32-b5b9-48db-baf7-761da95213f7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.781760 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t5mqf\" (UniqueName: \"kubernetes.io/projected/ea398908-abb3-4ce8-bbaf-a44b6350314d-kube-api-access-t5mqf\") pod \"ea398908-abb3-4ce8-bbaf-a44b6350314d\" (UID: \"ea398908-abb3-4ce8-bbaf-a44b6350314d\") " Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.781810 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ce5bf85d-dd65-4174-8fdc-10988986d665-marketplace-trusted-ca\") pod \"ce5bf85d-dd65-4174-8fdc-10988986d665\" (UID: \"ce5bf85d-dd65-4174-8fdc-10988986d665\") " Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.781845 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/ce5bf85d-dd65-4174-8fdc-10988986d665-marketplace-operator-metrics\") pod \"ce5bf85d-dd65-4174-8fdc-10988986d665\" (UID: \"ce5bf85d-dd65-4174-8fdc-10988986d665\") " Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.781869 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k587s\" (UniqueName: \"kubernetes.io/projected/7337688e-9bf5-436c-be77-afad7fa093ed-kube-api-access-k587s\") pod \"7337688e-9bf5-436c-be77-afad7fa093ed\" (UID: \"7337688e-9bf5-436c-be77-afad7fa093ed\") " Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.781895 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28d973df-adc6-41b0-81b9-afd3a743641f-catalog-content\") pod \"28d973df-adc6-41b0-81b9-afd3a743641f\" (UID: \"28d973df-adc6-41b0-81b9-afd3a743641f\") " Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.781943 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7337688e-9bf5-436c-be77-afad7fa093ed-utilities\") pod \"7337688e-9bf5-436c-be77-afad7fa093ed\" (UID: \"7337688e-9bf5-436c-be77-afad7fa093ed\") " Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.781973 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l5c5r\" (UniqueName: \"kubernetes.io/projected/ce5bf85d-dd65-4174-8fdc-10988986d665-kube-api-access-l5c5r\") pod \"ce5bf85d-dd65-4174-8fdc-10988986d665\" (UID: \"ce5bf85d-dd65-4174-8fdc-10988986d665\") " Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.782027 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7337688e-9bf5-436c-be77-afad7fa093ed-catalog-content\") pod \"7337688e-9bf5-436c-be77-afad7fa093ed\" (UID: \"7337688e-9bf5-436c-be77-afad7fa093ed\") " Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.782054 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea398908-abb3-4ce8-bbaf-a44b6350314d-utilities\") pod \"ea398908-abb3-4ce8-bbaf-a44b6350314d\" (UID: \"ea398908-abb3-4ce8-bbaf-a44b6350314d\") " Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.782078 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/28d973df-adc6-41b0-81b9-afd3a743641f-utilities\") pod \"28d973df-adc6-41b0-81b9-afd3a743641f\" (UID: \"28d973df-adc6-41b0-81b9-afd3a743641f\") " Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.782128 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z6lkv\" (UniqueName: \"kubernetes.io/projected/28d973df-adc6-41b0-81b9-afd3a743641f-kube-api-access-z6lkv\") pod \"28d973df-adc6-41b0-81b9-afd3a743641f\" (UID: \"28d973df-adc6-41b0-81b9-afd3a743641f\") " Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.782167 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea398908-abb3-4ce8-bbaf-a44b6350314d-catalog-content\") pod \"ea398908-abb3-4ce8-bbaf-a44b6350314d\" (UID: \"ea398908-abb3-4ce8-bbaf-a44b6350314d\") " Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.782454 4865 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1fa30c32-b5b9-48db-baf7-761da95213f7-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.782471 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-56lwr\" (UniqueName: \"kubernetes.io/projected/1fa30c32-b5b9-48db-baf7-761da95213f7-kube-api-access-56lwr\") on node \"crc\" DevicePath \"\"" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.782487 4865 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1fa30c32-b5b9-48db-baf7-761da95213f7-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.782852 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ea398908-abb3-4ce8-bbaf-a44b6350314d-utilities" (OuterVolumeSpecName: "utilities") pod "ea398908-abb3-4ce8-bbaf-a44b6350314d" (UID: "ea398908-abb3-4ce8-bbaf-a44b6350314d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.783224 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/28d973df-adc6-41b0-81b9-afd3a743641f-utilities" (OuterVolumeSpecName: "utilities") pod "28d973df-adc6-41b0-81b9-afd3a743641f" (UID: "28d973df-adc6-41b0-81b9-afd3a743641f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.783663 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7337688e-9bf5-436c-be77-afad7fa093ed-utilities" (OuterVolumeSpecName: "utilities") pod "7337688e-9bf5-436c-be77-afad7fa093ed" (UID: "7337688e-9bf5-436c-be77-afad7fa093ed"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.784744 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea398908-abb3-4ce8-bbaf-a44b6350314d-kube-api-access-t5mqf" (OuterVolumeSpecName: "kube-api-access-t5mqf") pod "ea398908-abb3-4ce8-bbaf-a44b6350314d" (UID: "ea398908-abb3-4ce8-bbaf-a44b6350314d"). InnerVolumeSpecName "kube-api-access-t5mqf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.785248 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/28d973df-adc6-41b0-81b9-afd3a743641f-kube-api-access-z6lkv" (OuterVolumeSpecName: "kube-api-access-z6lkv") pod "28d973df-adc6-41b0-81b9-afd3a743641f" (UID: "28d973df-adc6-41b0-81b9-afd3a743641f"). InnerVolumeSpecName "kube-api-access-z6lkv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.785646 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7337688e-9bf5-436c-be77-afad7fa093ed-kube-api-access-k587s" (OuterVolumeSpecName: "kube-api-access-k587s") pod "7337688e-9bf5-436c-be77-afad7fa093ed" (UID: "7337688e-9bf5-436c-be77-afad7fa093ed"). InnerVolumeSpecName "kube-api-access-k587s". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.785859 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce5bf85d-dd65-4174-8fdc-10988986d665-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "ce5bf85d-dd65-4174-8fdc-10988986d665" (UID: "ce5bf85d-dd65-4174-8fdc-10988986d665"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.786216 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce5bf85d-dd65-4174-8fdc-10988986d665-kube-api-access-l5c5r" (OuterVolumeSpecName: "kube-api-access-l5c5r") pod "ce5bf85d-dd65-4174-8fdc-10988986d665" (UID: "ce5bf85d-dd65-4174-8fdc-10988986d665"). InnerVolumeSpecName "kube-api-access-l5c5r". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.787226 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ce5bf85d-dd65-4174-8fdc-10988986d665-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "ce5bf85d-dd65-4174-8fdc-10988986d665" (UID: "ce5bf85d-dd65-4174-8fdc-10988986d665"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.802948 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7337688e-9bf5-436c-be77-afad7fa093ed-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7337688e-9bf5-436c-be77-afad7fa093ed" (UID: "7337688e-9bf5-436c-be77-afad7fa093ed"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.836599 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/28d973df-adc6-41b0-81b9-afd3a743641f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "28d973df-adc6-41b0-81b9-afd3a743641f" (UID: "28d973df-adc6-41b0-81b9-afd3a743641f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.884022 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z6lkv\" (UniqueName: \"kubernetes.io/projected/28d973df-adc6-41b0-81b9-afd3a743641f-kube-api-access-z6lkv\") on node \"crc\" DevicePath \"\"" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.884387 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t5mqf\" (UniqueName: \"kubernetes.io/projected/ea398908-abb3-4ce8-bbaf-a44b6350314d-kube-api-access-t5mqf\") on node \"crc\" DevicePath \"\"" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.884480 4865 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ce5bf85d-dd65-4174-8fdc-10988986d665-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.884553 4865 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/ce5bf85d-dd65-4174-8fdc-10988986d665-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.884632 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k587s\" (UniqueName: \"kubernetes.io/projected/7337688e-9bf5-436c-be77-afad7fa093ed-kube-api-access-k587s\") on node \"crc\" DevicePath \"\"" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.884709 4865 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28d973df-adc6-41b0-81b9-afd3a743641f-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.884787 4865 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7337688e-9bf5-436c-be77-afad7fa093ed-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.884882 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l5c5r\" (UniqueName: \"kubernetes.io/projected/ce5bf85d-dd65-4174-8fdc-10988986d665-kube-api-access-l5c5r\") on node \"crc\" DevicePath \"\"" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.884958 4865 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7337688e-9bf5-436c-be77-afad7fa093ed-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.885049 4865 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ea398908-abb3-4ce8-bbaf-a44b6350314d-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.885128 4865 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28d973df-adc6-41b0-81b9-afd3a743641f-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.907880 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ea398908-abb3-4ce8-bbaf-a44b6350314d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ea398908-abb3-4ce8-bbaf-a44b6350314d" (UID: "ea398908-abb3-4ce8-bbaf-a44b6350314d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.966851 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-9c8ld"] Jan 26 17:00:38 crc kubenswrapper[4865]: I0126 17:00:38.986409 4865 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ea398908-abb3-4ce8-bbaf-a44b6350314d-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 17:00:39 crc kubenswrapper[4865]: I0126 17:00:39.530400 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hxg9k" event={"ID":"ea398908-abb3-4ce8-bbaf-a44b6350314d","Type":"ContainerDied","Data":"4992a2592c64dad3a19efb08f8edcb038ec0da078e820f28a1c80d98f7397a15"} Jan 26 17:00:39 crc kubenswrapper[4865]: I0126 17:00:39.530793 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hxg9k" Jan 26 17:00:39 crc kubenswrapper[4865]: I0126 17:00:39.530825 4865 scope.go:117] "RemoveContainer" containerID="e5a20880dba9ecd775bb5301de21ed37c5f3d94a131e9cc629e031387ca9ca47" Jan 26 17:00:39 crc kubenswrapper[4865]: I0126 17:00:39.539123 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-r9jxm" Jan 26 17:00:39 crc kubenswrapper[4865]: I0126 17:00:39.539133 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r9jxm" event={"ID":"7337688e-9bf5-436c-be77-afad7fa093ed","Type":"ContainerDied","Data":"b66624227081c3403e448e8cff50fd13055d9f88112599cdc725a4db0d6323cb"} Jan 26 17:00:39 crc kubenswrapper[4865]: I0126 17:00:39.540978 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-9c8ld" event={"ID":"50bc8e04-9801-4c19-9e84-ee8ea7c0cad4","Type":"ContainerStarted","Data":"e1ec62a7e44db65547aa915e293a1a2fd868080892812ad947fe9a19bfc45593"} Jan 26 17:00:39 crc kubenswrapper[4865]: I0126 17:00:39.541104 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-9c8ld" event={"ID":"50bc8e04-9801-4c19-9e84-ee8ea7c0cad4","Type":"ContainerStarted","Data":"7b639dab101a76c661032d51ccc4994cb6621a4fc8e1b1c31b02a75b344bee60"} Jan 26 17:00:39 crc kubenswrapper[4865]: I0126 17:00:39.541457 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-9c8ld" Jan 26 17:00:39 crc kubenswrapper[4865]: I0126 17:00:39.547522 4865 scope.go:117] "RemoveContainer" containerID="4591f46793bd88eca8ded4e7a3162c6f5c33af4606451fa3f64fb6ee007727af" Jan 26 17:00:39 crc kubenswrapper[4865]: I0126 17:00:39.547931 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-gw6ld" event={"ID":"1fa30c32-b5b9-48db-baf7-761da95213f7","Type":"ContainerDied","Data":"449e124fb15ade49d894be02feff87e8408a0b914228f6505ef2242b307a6054"} Jan 26 17:00:39 crc kubenswrapper[4865]: I0126 17:00:39.548110 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-gw6ld" Jan 26 17:00:39 crc kubenswrapper[4865]: I0126 17:00:39.549776 4865 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-phb7b" Jan 26 17:00:39 crc kubenswrapper[4865]: I0126 17:00:39.549819 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-phb7b" event={"ID":"ce5bf85d-dd65-4174-8fdc-10988986d665","Type":"ContainerDied","Data":"cc5d1e460c5cdff15cc7001bb603dc4a93626e259a4c356270bfa5786f57c2e5"} Jan 26 17:00:39 crc kubenswrapper[4865]: I0126 17:00:39.551652 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7x4jl" event={"ID":"28d973df-adc6-41b0-81b9-afd3a743641f","Type":"ContainerDied","Data":"3e9d3cd15811ee0ec5c96ee9118d1714ce3c7154d8f277f81eeac9d7c0ca421c"} Jan 26 17:00:39 crc kubenswrapper[4865]: I0126 17:00:39.551769 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7x4jl" Jan 26 17:00:39 crc kubenswrapper[4865]: I0126 17:00:39.562105 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-9c8ld" Jan 26 17:00:39 crc kubenswrapper[4865]: I0126 17:00:39.567918 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-9c8ld" podStartSLOduration=2.567898216 podStartE2EDuration="2.567898216s" podCreationTimestamp="2026-01-26 17:00:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 17:00:39.567577837 +0000 UTC m=+367.151463424" watchObservedRunningTime="2026-01-26 17:00:39.567898216 +0000 UTC m=+367.151783803" Jan 26 17:00:39 crc kubenswrapper[4865]: I0126 17:00:39.571690 4865 scope.go:117] "RemoveContainer" containerID="3a6e1f3ba6ae2b3ef3d4cf5aef1b551da2777b6c2b579d6c7bfe48ae5527d647" Jan 26 17:00:39 crc kubenswrapper[4865]: I0126 17:00:39.604292 4865 scope.go:117] "RemoveContainer" containerID="d84edf2c216e585f647c74ee23cc9cf7d2e6450ac0a20757d2ffd2ff48519a17" Jan 26 17:00:39 crc kubenswrapper[4865]: I0126 17:00:39.628711 4865 scope.go:117] "RemoveContainer" containerID="dc1415cba01735bf8f78879807619fb140a2fa74976ea17e880df730de5916c5" Jan 26 17:00:39 crc kubenswrapper[4865]: I0126 17:00:39.636961 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-phb7b"] Jan 26 17:00:39 crc kubenswrapper[4865]: I0126 17:00:39.640640 4865 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-phb7b"] Jan 26 17:00:39 crc kubenswrapper[4865]: I0126 17:00:39.649935 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hxg9k"] Jan 26 17:00:39 crc kubenswrapper[4865]: I0126 17:00:39.654417 4865 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-hxg9k"] Jan 26 17:00:39 crc kubenswrapper[4865]: I0126 17:00:39.662928 4865 scope.go:117] "RemoveContainer" containerID="199a7f344e605cfba54f04202ad8536a401034b3240f7fd1a01fdb5c29d5073c" Jan 26 17:00:39 crc kubenswrapper[4865]: I0126 17:00:39.669393 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-gw6ld"] Jan 26 17:00:39 crc kubenswrapper[4865]: I0126 17:00:39.676121 4865 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-gw6ld"] Jan 26 17:00:39 crc kubenswrapper[4865]: I0126 
17:00:39.684750 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-r9jxm"] Jan 26 17:00:39 crc kubenswrapper[4865]: I0126 17:00:39.703077 4865 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-r9jxm"] Jan 26 17:00:39 crc kubenswrapper[4865]: I0126 17:00:39.708646 4865 scope.go:117] "RemoveContainer" containerID="e3e0e69295103de391636c38ede4250e4e109f56322501446ce035b5c809b31a" Jan 26 17:00:39 crc kubenswrapper[4865]: I0126 17:00:39.711525 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7x4jl"] Jan 26 17:00:39 crc kubenswrapper[4865]: I0126 17:00:39.725851 4865 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-7x4jl"] Jan 26 17:00:39 crc kubenswrapper[4865]: I0126 17:00:39.752107 4865 scope.go:117] "RemoveContainer" containerID="93e64981d8b9951ed6406840fbef03e757e2bb2a488f1543615661b32e606972" Jan 26 17:00:39 crc kubenswrapper[4865]: I0126 17:00:39.766592 4865 scope.go:117] "RemoveContainer" containerID="3264020642a3593da10e8e9650cc7daa51e0cef67a76db5c21c5b394de83b752" Jan 26 17:00:39 crc kubenswrapper[4865]: I0126 17:00:39.779398 4865 scope.go:117] "RemoveContainer" containerID="51ca7c97b44cf70fd6b579979b4622ce13160e317e74d5ad33e83a7b0bf15983" Jan 26 17:00:39 crc kubenswrapper[4865]: I0126 17:00:39.795440 4865 scope.go:117] "RemoveContainer" containerID="2f319958091fc0d9103daaf13741d642c5e7458be0a2050797d55e55e5f02b1e" Jan 26 17:00:39 crc kubenswrapper[4865]: I0126 17:00:39.809179 4865 scope.go:117] "RemoveContainer" containerID="141e4cb88b61ce456d7319fae495575d0906950c62e6c46fb927848a6dee34cd" Jan 26 17:00:39 crc kubenswrapper[4865]: I0126 17:00:39.823879 4865 scope.go:117] "RemoveContainer" containerID="4dc0a3a6e67aa86fb23ba40815915da6aa28dd41101e79d40808ad70298c1b0a" Jan 26 17:00:39 crc kubenswrapper[4865]: I0126 17:00:39.978089 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6b5549788c-qw5fs"] Jan 26 17:00:39 crc kubenswrapper[4865]: I0126 17:00:39.978355 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6b5549788c-qw5fs" podUID="d5184966-efe6-4c65-a6a3-fb982bb28bdb" containerName="route-controller-manager" containerID="cri-o://43db41ebfcce6451953b7412483ab15964f7c00b709c7f0d55d4c184983ae7d9" gracePeriod=30 Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.370947 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1fa30c32-b5b9-48db-baf7-761da95213f7" path="/var/lib/kubelet/pods/1fa30c32-b5b9-48db-baf7-761da95213f7/volumes" Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.372495 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="28d973df-adc6-41b0-81b9-afd3a743641f" path="/var/lib/kubelet/pods/28d973df-adc6-41b0-81b9-afd3a743641f/volumes" Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.373289 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7337688e-9bf5-436c-be77-afad7fa093ed" path="/var/lib/kubelet/pods/7337688e-9bf5-436c-be77-afad7fa093ed/volumes" Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.374523 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce5bf85d-dd65-4174-8fdc-10988986d665" path="/var/lib/kubelet/pods/ce5bf85d-dd65-4174-8fdc-10988986d665/volumes" Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 
17:00:40.375982 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea398908-abb3-4ce8-bbaf-a44b6350314d" path="/var/lib/kubelet/pods/ea398908-abb3-4ce8-bbaf-a44b6350314d/volumes" Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.424465 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6b5549788c-qw5fs" Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.513106 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d5184966-efe6-4c65-a6a3-fb982bb28bdb-serving-cert\") pod \"d5184966-efe6-4c65-a6a3-fb982bb28bdb\" (UID: \"d5184966-efe6-4c65-a6a3-fb982bb28bdb\") " Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.513189 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d5184966-efe6-4c65-a6a3-fb982bb28bdb-config\") pod \"d5184966-efe6-4c65-a6a3-fb982bb28bdb\" (UID: \"d5184966-efe6-4c65-a6a3-fb982bb28bdb\") " Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.513234 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dz95n\" (UniqueName: \"kubernetes.io/projected/d5184966-efe6-4c65-a6a3-fb982bb28bdb-kube-api-access-dz95n\") pod \"d5184966-efe6-4c65-a6a3-fb982bb28bdb\" (UID: \"d5184966-efe6-4c65-a6a3-fb982bb28bdb\") " Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.513302 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d5184966-efe6-4c65-a6a3-fb982bb28bdb-client-ca\") pod \"d5184966-efe6-4c65-a6a3-fb982bb28bdb\" (UID: \"d5184966-efe6-4c65-a6a3-fb982bb28bdb\") " Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.514302 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d5184966-efe6-4c65-a6a3-fb982bb28bdb-client-ca" (OuterVolumeSpecName: "client-ca") pod "d5184966-efe6-4c65-a6a3-fb982bb28bdb" (UID: "d5184966-efe6-4c65-a6a3-fb982bb28bdb"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.515346 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d5184966-efe6-4c65-a6a3-fb982bb28bdb-config" (OuterVolumeSpecName: "config") pod "d5184966-efe6-4c65-a6a3-fb982bb28bdb" (UID: "d5184966-efe6-4c65-a6a3-fb982bb28bdb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.520291 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d5184966-efe6-4c65-a6a3-fb982bb28bdb-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "d5184966-efe6-4c65-a6a3-fb982bb28bdb" (UID: "d5184966-efe6-4c65-a6a3-fb982bb28bdb"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.521263 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d5184966-efe6-4c65-a6a3-fb982bb28bdb-kube-api-access-dz95n" (OuterVolumeSpecName: "kube-api-access-dz95n") pod "d5184966-efe6-4c65-a6a3-fb982bb28bdb" (UID: "d5184966-efe6-4c65-a6a3-fb982bb28bdb"). InnerVolumeSpecName "kube-api-access-dz95n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.561292 4865 generic.go:334] "Generic (PLEG): container finished" podID="d5184966-efe6-4c65-a6a3-fb982bb28bdb" containerID="43db41ebfcce6451953b7412483ab15964f7c00b709c7f0d55d4c184983ae7d9" exitCode=0 Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.561412 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6b5549788c-qw5fs" Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.561546 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6b5549788c-qw5fs" event={"ID":"d5184966-efe6-4c65-a6a3-fb982bb28bdb","Type":"ContainerDied","Data":"43db41ebfcce6451953b7412483ab15964f7c00b709c7f0d55d4c184983ae7d9"} Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.561595 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6b5549788c-qw5fs" event={"ID":"d5184966-efe6-4c65-a6a3-fb982bb28bdb","Type":"ContainerDied","Data":"f956e98ced680370698527bdbdd6b896737ef1ca682b0115accdc63125ecd103"} Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.561834 4865 scope.go:117] "RemoveContainer" containerID="43db41ebfcce6451953b7412483ab15964f7c00b709c7f0d55d4c184983ae7d9" Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.581013 4865 scope.go:117] "RemoveContainer" containerID="43db41ebfcce6451953b7412483ab15964f7c00b709c7f0d55d4c184983ae7d9" Jan 26 17:00:40 crc kubenswrapper[4865]: E0126 17:00:40.587582 4865 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"43db41ebfcce6451953b7412483ab15964f7c00b709c7f0d55d4c184983ae7d9\": container with ID starting with 43db41ebfcce6451953b7412483ab15964f7c00b709c7f0d55d4c184983ae7d9 not found: ID does not exist" containerID="43db41ebfcce6451953b7412483ab15964f7c00b709c7f0d55d4c184983ae7d9" Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.587646 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43db41ebfcce6451953b7412483ab15964f7c00b709c7f0d55d4c184983ae7d9"} err="failed to get container status \"43db41ebfcce6451953b7412483ab15964f7c00b709c7f0d55d4c184983ae7d9\": rpc error: code = NotFound desc = could not find container \"43db41ebfcce6451953b7412483ab15964f7c00b709c7f0d55d4c184983ae7d9\": container with ID starting with 43db41ebfcce6451953b7412483ab15964f7c00b709c7f0d55d4c184983ae7d9 not found: ID does not exist" Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.589261 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6b5549788c-qw5fs"] Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.592720 4865 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6b5549788c-qw5fs"] Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.615075 4865 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d5184966-efe6-4c65-a6a3-fb982bb28bdb-client-ca\") on node \"crc\" DevicePath \"\"" Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.615140 4865 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d5184966-efe6-4c65-a6a3-fb982bb28bdb-serving-cert\") on node 
\"crc\" DevicePath \"\"" Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.615163 4865 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d5184966-efe6-4c65-a6a3-fb982bb28bdb-config\") on node \"crc\" DevicePath \"\"" Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.615181 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dz95n\" (UniqueName: \"kubernetes.io/projected/d5184966-efe6-4c65-a6a3-fb982bb28bdb-kube-api-access-dz95n\") on node \"crc\" DevicePath \"\"" Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.972336 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-8vh55"] Jan 26 17:00:40 crc kubenswrapper[4865]: E0126 17:00:40.972550 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce5bf85d-dd65-4174-8fdc-10988986d665" containerName="marketplace-operator" Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.972563 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce5bf85d-dd65-4174-8fdc-10988986d665" containerName="marketplace-operator" Jan 26 17:00:40 crc kubenswrapper[4865]: E0126 17:00:40.972572 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fa30c32-b5b9-48db-baf7-761da95213f7" containerName="registry-server" Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.972579 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fa30c32-b5b9-48db-baf7-761da95213f7" containerName="registry-server" Jan 26 17:00:40 crc kubenswrapper[4865]: E0126 17:00:40.972589 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7337688e-9bf5-436c-be77-afad7fa093ed" containerName="extract-utilities" Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.972597 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="7337688e-9bf5-436c-be77-afad7fa093ed" containerName="extract-utilities" Jan 26 17:00:40 crc kubenswrapper[4865]: E0126 17:00:40.972603 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea398908-abb3-4ce8-bbaf-a44b6350314d" containerName="registry-server" Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.972609 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea398908-abb3-4ce8-bbaf-a44b6350314d" containerName="registry-server" Jan 26 17:00:40 crc kubenswrapper[4865]: E0126 17:00:40.972616 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28d973df-adc6-41b0-81b9-afd3a743641f" containerName="extract-utilities" Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.972621 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="28d973df-adc6-41b0-81b9-afd3a743641f" containerName="extract-utilities" Jan 26 17:00:40 crc kubenswrapper[4865]: E0126 17:00:40.972627 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fa30c32-b5b9-48db-baf7-761da95213f7" containerName="extract-content" Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.972633 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fa30c32-b5b9-48db-baf7-761da95213f7" containerName="extract-content" Jan 26 17:00:40 crc kubenswrapper[4865]: E0126 17:00:40.972644 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28d973df-adc6-41b0-81b9-afd3a743641f" containerName="registry-server" Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.972650 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="28d973df-adc6-41b0-81b9-afd3a743641f" containerName="registry-server" Jan 26 17:00:40 crc 
kubenswrapper[4865]: E0126 17:00:40.972658 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5184966-efe6-4c65-a6a3-fb982bb28bdb" containerName="route-controller-manager" Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.972663 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5184966-efe6-4c65-a6a3-fb982bb28bdb" containerName="route-controller-manager" Jan 26 17:00:40 crc kubenswrapper[4865]: E0126 17:00:40.972672 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7337688e-9bf5-436c-be77-afad7fa093ed" containerName="extract-content" Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.972677 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="7337688e-9bf5-436c-be77-afad7fa093ed" containerName="extract-content" Jan 26 17:00:40 crc kubenswrapper[4865]: E0126 17:00:40.972684 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea398908-abb3-4ce8-bbaf-a44b6350314d" containerName="extract-content" Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.972689 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea398908-abb3-4ce8-bbaf-a44b6350314d" containerName="extract-content" Jan 26 17:00:40 crc kubenswrapper[4865]: E0126 17:00:40.972698 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28d973df-adc6-41b0-81b9-afd3a743641f" containerName="extract-content" Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.972703 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="28d973df-adc6-41b0-81b9-afd3a743641f" containerName="extract-content" Jan 26 17:00:40 crc kubenswrapper[4865]: E0126 17:00:40.972710 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7337688e-9bf5-436c-be77-afad7fa093ed" containerName="registry-server" Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.972716 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="7337688e-9bf5-436c-be77-afad7fa093ed" containerName="registry-server" Jan 26 17:00:40 crc kubenswrapper[4865]: E0126 17:00:40.972723 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fa30c32-b5b9-48db-baf7-761da95213f7" containerName="extract-utilities" Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.972729 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fa30c32-b5b9-48db-baf7-761da95213f7" containerName="extract-utilities" Jan 26 17:00:40 crc kubenswrapper[4865]: E0126 17:00:40.972736 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea398908-abb3-4ce8-bbaf-a44b6350314d" containerName="extract-utilities" Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.972743 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea398908-abb3-4ce8-bbaf-a44b6350314d" containerName="extract-utilities" Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.972839 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea398908-abb3-4ce8-bbaf-a44b6350314d" containerName="registry-server" Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.972850 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fa30c32-b5b9-48db-baf7-761da95213f7" containerName="registry-server" Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.972859 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="d5184966-efe6-4c65-a6a3-fb982bb28bdb" containerName="route-controller-manager" Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.972865 4865 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="7337688e-9bf5-436c-be77-afad7fa093ed" containerName="registry-server" Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.972875 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="28d973df-adc6-41b0-81b9-afd3a743641f" containerName="registry-server" Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.972881 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce5bf85d-dd65-4174-8fdc-10988986d665" containerName="marketplace-operator" Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.973601 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8vh55" Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.975380 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 26 17:00:40 crc kubenswrapper[4865]: I0126 17:00:40.981972 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8vh55"] Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.046598 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-694d7c4b67-87czn"] Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.047480 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-694d7c4b67-87czn" Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.049583 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.049750 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.054358 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.054563 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.054741 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.055265 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.059257 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-694d7c4b67-87czn"] Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.123228 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6wmbc\" (UniqueName: \"kubernetes.io/projected/8e7fa29d-427b-4a11-88db-190d64b03f52-kube-api-access-6wmbc\") pod \"redhat-marketplace-8vh55\" (UID: \"8e7fa29d-427b-4a11-88db-190d64b03f52\") " pod="openshift-marketplace/redhat-marketplace-8vh55" Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.123325 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e7fa29d-427b-4a11-88db-190d64b03f52-catalog-content\") pod \"redhat-marketplace-8vh55\" (UID: 
\"8e7fa29d-427b-4a11-88db-190d64b03f52\") " pod="openshift-marketplace/redhat-marketplace-8vh55" Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.123435 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e7fa29d-427b-4a11-88db-190d64b03f52-utilities\") pod \"redhat-marketplace-8vh55\" (UID: \"8e7fa29d-427b-4a11-88db-190d64b03f52\") " pod="openshift-marketplace/redhat-marketplace-8vh55" Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.224837 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6z2t4\" (UniqueName: \"kubernetes.io/projected/0a99cc14-e6ee-4c2e-817a-1e46fab1ba57-kube-api-access-6z2t4\") pod \"route-controller-manager-694d7c4b67-87czn\" (UID: \"0a99cc14-e6ee-4c2e-817a-1e46fab1ba57\") " pod="openshift-route-controller-manager/route-controller-manager-694d7c4b67-87czn" Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.224914 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0a99cc14-e6ee-4c2e-817a-1e46fab1ba57-serving-cert\") pod \"route-controller-manager-694d7c4b67-87czn\" (UID: \"0a99cc14-e6ee-4c2e-817a-1e46fab1ba57\") " pod="openshift-route-controller-manager/route-controller-manager-694d7c4b67-87czn" Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.225053 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e7fa29d-427b-4a11-88db-190d64b03f52-utilities\") pod \"redhat-marketplace-8vh55\" (UID: \"8e7fa29d-427b-4a11-88db-190d64b03f52\") " pod="openshift-marketplace/redhat-marketplace-8vh55" Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.225099 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0a99cc14-e6ee-4c2e-817a-1e46fab1ba57-config\") pod \"route-controller-manager-694d7c4b67-87czn\" (UID: \"0a99cc14-e6ee-4c2e-817a-1e46fab1ba57\") " pod="openshift-route-controller-manager/route-controller-manager-694d7c4b67-87czn" Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.225135 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6wmbc\" (UniqueName: \"kubernetes.io/projected/8e7fa29d-427b-4a11-88db-190d64b03f52-kube-api-access-6wmbc\") pod \"redhat-marketplace-8vh55\" (UID: \"8e7fa29d-427b-4a11-88db-190d64b03f52\") " pod="openshift-marketplace/redhat-marketplace-8vh55" Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.225166 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0a99cc14-e6ee-4c2e-817a-1e46fab1ba57-client-ca\") pod \"route-controller-manager-694d7c4b67-87czn\" (UID: \"0a99cc14-e6ee-4c2e-817a-1e46fab1ba57\") " pod="openshift-route-controller-manager/route-controller-manager-694d7c4b67-87czn" Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.225318 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e7fa29d-427b-4a11-88db-190d64b03f52-catalog-content\") pod \"redhat-marketplace-8vh55\" (UID: \"8e7fa29d-427b-4a11-88db-190d64b03f52\") " pod="openshift-marketplace/redhat-marketplace-8vh55" Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 
17:00:41.225585 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e7fa29d-427b-4a11-88db-190d64b03f52-utilities\") pod \"redhat-marketplace-8vh55\" (UID: \"8e7fa29d-427b-4a11-88db-190d64b03f52\") " pod="openshift-marketplace/redhat-marketplace-8vh55" Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.225680 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e7fa29d-427b-4a11-88db-190d64b03f52-catalog-content\") pod \"redhat-marketplace-8vh55\" (UID: \"8e7fa29d-427b-4a11-88db-190d64b03f52\") " pod="openshift-marketplace/redhat-marketplace-8vh55" Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.242624 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6wmbc\" (UniqueName: \"kubernetes.io/projected/8e7fa29d-427b-4a11-88db-190d64b03f52-kube-api-access-6wmbc\") pod \"redhat-marketplace-8vh55\" (UID: \"8e7fa29d-427b-4a11-88db-190d64b03f52\") " pod="openshift-marketplace/redhat-marketplace-8vh55" Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.292254 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8vh55" Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.327709 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0a99cc14-e6ee-4c2e-817a-1e46fab1ba57-config\") pod \"route-controller-manager-694d7c4b67-87czn\" (UID: \"0a99cc14-e6ee-4c2e-817a-1e46fab1ba57\") " pod="openshift-route-controller-manager/route-controller-manager-694d7c4b67-87czn" Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.327774 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0a99cc14-e6ee-4c2e-817a-1e46fab1ba57-client-ca\") pod \"route-controller-manager-694d7c4b67-87czn\" (UID: \"0a99cc14-e6ee-4c2e-817a-1e46fab1ba57\") " pod="openshift-route-controller-manager/route-controller-manager-694d7c4b67-87czn" Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.327834 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6z2t4\" (UniqueName: \"kubernetes.io/projected/0a99cc14-e6ee-4c2e-817a-1e46fab1ba57-kube-api-access-6z2t4\") pod \"route-controller-manager-694d7c4b67-87czn\" (UID: \"0a99cc14-e6ee-4c2e-817a-1e46fab1ba57\") " pod="openshift-route-controller-manager/route-controller-manager-694d7c4b67-87czn" Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.327864 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0a99cc14-e6ee-4c2e-817a-1e46fab1ba57-serving-cert\") pod \"route-controller-manager-694d7c4b67-87czn\" (UID: \"0a99cc14-e6ee-4c2e-817a-1e46fab1ba57\") " pod="openshift-route-controller-manager/route-controller-manager-694d7c4b67-87czn" Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.329284 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0a99cc14-e6ee-4c2e-817a-1e46fab1ba57-client-ca\") pod \"route-controller-manager-694d7c4b67-87czn\" (UID: \"0a99cc14-e6ee-4c2e-817a-1e46fab1ba57\") " pod="openshift-route-controller-manager/route-controller-manager-694d7c4b67-87czn" Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.329420 4865 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0a99cc14-e6ee-4c2e-817a-1e46fab1ba57-config\") pod \"route-controller-manager-694d7c4b67-87czn\" (UID: \"0a99cc14-e6ee-4c2e-817a-1e46fab1ba57\") " pod="openshift-route-controller-manager/route-controller-manager-694d7c4b67-87czn" Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.332306 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0a99cc14-e6ee-4c2e-817a-1e46fab1ba57-serving-cert\") pod \"route-controller-manager-694d7c4b67-87czn\" (UID: \"0a99cc14-e6ee-4c2e-817a-1e46fab1ba57\") " pod="openshift-route-controller-manager/route-controller-manager-694d7c4b67-87czn" Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.345914 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6z2t4\" (UniqueName: \"kubernetes.io/projected/0a99cc14-e6ee-4c2e-817a-1e46fab1ba57-kube-api-access-6z2t4\") pod \"route-controller-manager-694d7c4b67-87czn\" (UID: \"0a99cc14-e6ee-4c2e-817a-1e46fab1ba57\") " pod="openshift-route-controller-manager/route-controller-manager-694d7c4b67-87czn" Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.370828 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-694d7c4b67-87czn" Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.565016 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-sjpcq"] Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.566793 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-sjpcq" Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.568934 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.577671 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-sjpcq"] Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.666713 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8vh55"] Jan 26 17:00:41 crc kubenswrapper[4865]: W0126 17:00:41.680803 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8e7fa29d_427b_4a11_88db_190d64b03f52.slice/crio-8a9757b23525510655e453955470d1eeb8044df3b2a5daf7eb466eb1e765c1a7 WatchSource:0}: Error finding container 8a9757b23525510655e453955470d1eeb8044df3b2a5daf7eb466eb1e765c1a7: Status 404 returned error can't find the container with id 8a9757b23525510655e453955470d1eeb8044df3b2a5daf7eb466eb1e765c1a7 Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.732800 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97ff386d-ae5d-42d7-9e8f-60c2d88716bd-catalog-content\") pod \"certified-operators-sjpcq\" (UID: \"97ff386d-ae5d-42d7-9e8f-60c2d88716bd\") " pod="openshift-marketplace/certified-operators-sjpcq" Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.734073 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/97ff386d-ae5d-42d7-9e8f-60c2d88716bd-utilities\") pod \"certified-operators-sjpcq\" (UID: \"97ff386d-ae5d-42d7-9e8f-60c2d88716bd\") " pod="openshift-marketplace/certified-operators-sjpcq" Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.734141 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmdsn\" (UniqueName: \"kubernetes.io/projected/97ff386d-ae5d-42d7-9e8f-60c2d88716bd-kube-api-access-mmdsn\") pod \"certified-operators-sjpcq\" (UID: \"97ff386d-ae5d-42d7-9e8f-60c2d88716bd\") " pod="openshift-marketplace/certified-operators-sjpcq" Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.774379 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-694d7c4b67-87czn"] Jan 26 17:00:41 crc kubenswrapper[4865]: W0126 17:00:41.781721 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0a99cc14_e6ee_4c2e_817a_1e46fab1ba57.slice/crio-f77a9485189a522578705e9c3a571e801bc1f9653b09ba92a758d15726c1b4f8 WatchSource:0}: Error finding container f77a9485189a522578705e9c3a571e801bc1f9653b09ba92a758d15726c1b4f8: Status 404 returned error can't find the container with id f77a9485189a522578705e9c3a571e801bc1f9653b09ba92a758d15726c1b4f8 Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.835387 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97ff386d-ae5d-42d7-9e8f-60c2d88716bd-catalog-content\") pod \"certified-operators-sjpcq\" (UID: \"97ff386d-ae5d-42d7-9e8f-60c2d88716bd\") " pod="openshift-marketplace/certified-operators-sjpcq" Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.835633 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97ff386d-ae5d-42d7-9e8f-60c2d88716bd-utilities\") pod \"certified-operators-sjpcq\" (UID: \"97ff386d-ae5d-42d7-9e8f-60c2d88716bd\") " pod="openshift-marketplace/certified-operators-sjpcq" Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.835778 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mmdsn\" (UniqueName: \"kubernetes.io/projected/97ff386d-ae5d-42d7-9e8f-60c2d88716bd-kube-api-access-mmdsn\") pod \"certified-operators-sjpcq\" (UID: \"97ff386d-ae5d-42d7-9e8f-60c2d88716bd\") " pod="openshift-marketplace/certified-operators-sjpcq" Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.835909 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97ff386d-ae5d-42d7-9e8f-60c2d88716bd-catalog-content\") pod \"certified-operators-sjpcq\" (UID: \"97ff386d-ae5d-42d7-9e8f-60c2d88716bd\") " pod="openshift-marketplace/certified-operators-sjpcq" Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.837317 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97ff386d-ae5d-42d7-9e8f-60c2d88716bd-utilities\") pod \"certified-operators-sjpcq\" (UID: \"97ff386d-ae5d-42d7-9e8f-60c2d88716bd\") " pod="openshift-marketplace/certified-operators-sjpcq" Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.855541 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mmdsn\" (UniqueName: 
\"kubernetes.io/projected/97ff386d-ae5d-42d7-9e8f-60c2d88716bd-kube-api-access-mmdsn\") pod \"certified-operators-sjpcq\" (UID: \"97ff386d-ae5d-42d7-9e8f-60c2d88716bd\") " pod="openshift-marketplace/certified-operators-sjpcq" Jan 26 17:00:41 crc kubenswrapper[4865]: I0126 17:00:41.891063 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-sjpcq" Jan 26 17:00:42 crc kubenswrapper[4865]: I0126 17:00:42.281789 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-sjpcq"] Jan 26 17:00:42 crc kubenswrapper[4865]: I0126 17:00:42.365237 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d5184966-efe6-4c65-a6a3-fb982bb28bdb" path="/var/lib/kubelet/pods/d5184966-efe6-4c65-a6a3-fb982bb28bdb/volumes" Jan 26 17:00:42 crc kubenswrapper[4865]: I0126 17:00:42.582765 4865 generic.go:334] "Generic (PLEG): container finished" podID="8e7fa29d-427b-4a11-88db-190d64b03f52" containerID="4b05e4937b3ad36258e87b5d78f3700d05e622deb55d406dd916246548bc98e5" exitCode=0 Jan 26 17:00:42 crc kubenswrapper[4865]: I0126 17:00:42.582824 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8vh55" event={"ID":"8e7fa29d-427b-4a11-88db-190d64b03f52","Type":"ContainerDied","Data":"4b05e4937b3ad36258e87b5d78f3700d05e622deb55d406dd916246548bc98e5"} Jan 26 17:00:42 crc kubenswrapper[4865]: I0126 17:00:42.582849 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8vh55" event={"ID":"8e7fa29d-427b-4a11-88db-190d64b03f52","Type":"ContainerStarted","Data":"8a9757b23525510655e453955470d1eeb8044df3b2a5daf7eb466eb1e765c1a7"} Jan 26 17:00:42 crc kubenswrapper[4865]: I0126 17:00:42.586633 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-694d7c4b67-87czn" event={"ID":"0a99cc14-e6ee-4c2e-817a-1e46fab1ba57","Type":"ContainerStarted","Data":"62ca262fb7fd2bfd3790e583c182a5db137e599c1c78a3c4d8f7eff4f2adb7c4"} Jan 26 17:00:42 crc kubenswrapper[4865]: I0126 17:00:42.586670 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-694d7c4b67-87czn" event={"ID":"0a99cc14-e6ee-4c2e-817a-1e46fab1ba57","Type":"ContainerStarted","Data":"f77a9485189a522578705e9c3a571e801bc1f9653b09ba92a758d15726c1b4f8"} Jan 26 17:00:42 crc kubenswrapper[4865]: I0126 17:00:42.587496 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-694d7c4b67-87czn" Jan 26 17:00:42 crc kubenswrapper[4865]: I0126 17:00:42.590878 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sjpcq" event={"ID":"97ff386d-ae5d-42d7-9e8f-60c2d88716bd","Type":"ContainerStarted","Data":"d4629acaa149058f5a8cc6e405c4f18336adb61a336cfd859eb9563fa99a121f"} Jan 26 17:00:42 crc kubenswrapper[4865]: I0126 17:00:42.597763 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-694d7c4b67-87czn" Jan 26 17:00:43 crc kubenswrapper[4865]: I0126 17:00:43.566628 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-694d7c4b67-87czn" podStartSLOduration=4.566607043 podStartE2EDuration="4.566607043s" podCreationTimestamp="2026-01-26 17:00:39 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 17:00:42.634125697 +0000 UTC m=+370.218011284" watchObservedRunningTime="2026-01-26 17:00:43.566607043 +0000 UTC m=+371.150492630" Jan 26 17:00:43 crc kubenswrapper[4865]: I0126 17:00:43.569615 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-lw6st"] Jan 26 17:00:43 crc kubenswrapper[4865]: I0126 17:00:43.570890 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lw6st" Jan 26 17:00:43 crc kubenswrapper[4865]: I0126 17:00:43.576141 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 26 17:00:43 crc kubenswrapper[4865]: I0126 17:00:43.576914 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lw6st"] Jan 26 17:00:43 crc kubenswrapper[4865]: I0126 17:00:43.598164 4865 generic.go:334] "Generic (PLEG): container finished" podID="97ff386d-ae5d-42d7-9e8f-60c2d88716bd" containerID="9472e67df51b41aa03dfe2dc0dc0bfb7375352725604e64d41d37df6a47219da" exitCode=0 Jan 26 17:00:43 crc kubenswrapper[4865]: I0126 17:00:43.598266 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sjpcq" event={"ID":"97ff386d-ae5d-42d7-9e8f-60c2d88716bd","Type":"ContainerDied","Data":"9472e67df51b41aa03dfe2dc0dc0bfb7375352725604e64d41d37df6a47219da"} Jan 26 17:00:43 crc kubenswrapper[4865]: I0126 17:00:43.673675 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83363318-2aa6-4dd2-b258-f369e1aa1dc5-utilities\") pod \"community-operators-lw6st\" (UID: \"83363318-2aa6-4dd2-b258-f369e1aa1dc5\") " pod="openshift-marketplace/community-operators-lw6st" Jan 26 17:00:43 crc kubenswrapper[4865]: I0126 17:00:43.673830 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83363318-2aa6-4dd2-b258-f369e1aa1dc5-catalog-content\") pod \"community-operators-lw6st\" (UID: \"83363318-2aa6-4dd2-b258-f369e1aa1dc5\") " pod="openshift-marketplace/community-operators-lw6st" Jan 26 17:00:43 crc kubenswrapper[4865]: I0126 17:00:43.673867 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xqgzr\" (UniqueName: \"kubernetes.io/projected/83363318-2aa6-4dd2-b258-f369e1aa1dc5-kube-api-access-xqgzr\") pod \"community-operators-lw6st\" (UID: \"83363318-2aa6-4dd2-b258-f369e1aa1dc5\") " pod="openshift-marketplace/community-operators-lw6st" Jan 26 17:00:43 crc kubenswrapper[4865]: I0126 17:00:43.774640 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83363318-2aa6-4dd2-b258-f369e1aa1dc5-catalog-content\") pod \"community-operators-lw6st\" (UID: \"83363318-2aa6-4dd2-b258-f369e1aa1dc5\") " pod="openshift-marketplace/community-operators-lw6st" Jan 26 17:00:43 crc kubenswrapper[4865]: I0126 17:00:43.774888 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xqgzr\" (UniqueName: \"kubernetes.io/projected/83363318-2aa6-4dd2-b258-f369e1aa1dc5-kube-api-access-xqgzr\") pod \"community-operators-lw6st\" (UID: 
\"83363318-2aa6-4dd2-b258-f369e1aa1dc5\") " pod="openshift-marketplace/community-operators-lw6st" Jan 26 17:00:43 crc kubenswrapper[4865]: I0126 17:00:43.774966 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83363318-2aa6-4dd2-b258-f369e1aa1dc5-utilities\") pod \"community-operators-lw6st\" (UID: \"83363318-2aa6-4dd2-b258-f369e1aa1dc5\") " pod="openshift-marketplace/community-operators-lw6st" Jan 26 17:00:43 crc kubenswrapper[4865]: I0126 17:00:43.776335 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/83363318-2aa6-4dd2-b258-f369e1aa1dc5-catalog-content\") pod \"community-operators-lw6st\" (UID: \"83363318-2aa6-4dd2-b258-f369e1aa1dc5\") " pod="openshift-marketplace/community-operators-lw6st" Jan 26 17:00:43 crc kubenswrapper[4865]: I0126 17:00:43.778338 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/83363318-2aa6-4dd2-b258-f369e1aa1dc5-utilities\") pod \"community-operators-lw6st\" (UID: \"83363318-2aa6-4dd2-b258-f369e1aa1dc5\") " pod="openshift-marketplace/community-operators-lw6st" Jan 26 17:00:43 crc kubenswrapper[4865]: I0126 17:00:43.796034 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xqgzr\" (UniqueName: \"kubernetes.io/projected/83363318-2aa6-4dd2-b258-f369e1aa1dc5-kube-api-access-xqgzr\") pod \"community-operators-lw6st\" (UID: \"83363318-2aa6-4dd2-b258-f369e1aa1dc5\") " pod="openshift-marketplace/community-operators-lw6st" Jan 26 17:00:43 crc kubenswrapper[4865]: I0126 17:00:43.926043 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lw6st" Jan 26 17:00:44 crc kubenswrapper[4865]: I0126 17:00:44.165680 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-jljqd"] Jan 26 17:00:44 crc kubenswrapper[4865]: I0126 17:00:44.166889 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-jljqd" Jan 26 17:00:44 crc kubenswrapper[4865]: I0126 17:00:44.170379 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 26 17:00:44 crc kubenswrapper[4865]: I0126 17:00:44.176834 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jljqd"] Jan 26 17:00:44 crc kubenswrapper[4865]: I0126 17:00:44.290942 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r7d8d\" (UniqueName: \"kubernetes.io/projected/44eb3feb-9970-4a11-bc32-4e0da21dc345-kube-api-access-r7d8d\") pod \"redhat-operators-jljqd\" (UID: \"44eb3feb-9970-4a11-bc32-4e0da21dc345\") " pod="openshift-marketplace/redhat-operators-jljqd" Jan 26 17:00:44 crc kubenswrapper[4865]: I0126 17:00:44.291035 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/44eb3feb-9970-4a11-bc32-4e0da21dc345-utilities\") pod \"redhat-operators-jljqd\" (UID: \"44eb3feb-9970-4a11-bc32-4e0da21dc345\") " pod="openshift-marketplace/redhat-operators-jljqd" Jan 26 17:00:44 crc kubenswrapper[4865]: I0126 17:00:44.294811 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/44eb3feb-9970-4a11-bc32-4e0da21dc345-catalog-content\") pod \"redhat-operators-jljqd\" (UID: \"44eb3feb-9970-4a11-bc32-4e0da21dc345\") " pod="openshift-marketplace/redhat-operators-jljqd" Jan 26 17:00:44 crc kubenswrapper[4865]: I0126 17:00:44.321463 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lw6st"] Jan 26 17:00:44 crc kubenswrapper[4865]: I0126 17:00:44.396391 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r7d8d\" (UniqueName: \"kubernetes.io/projected/44eb3feb-9970-4a11-bc32-4e0da21dc345-kube-api-access-r7d8d\") pod \"redhat-operators-jljqd\" (UID: \"44eb3feb-9970-4a11-bc32-4e0da21dc345\") " pod="openshift-marketplace/redhat-operators-jljqd" Jan 26 17:00:44 crc kubenswrapper[4865]: I0126 17:00:44.396770 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/44eb3feb-9970-4a11-bc32-4e0da21dc345-utilities\") pod \"redhat-operators-jljqd\" (UID: \"44eb3feb-9970-4a11-bc32-4e0da21dc345\") " pod="openshift-marketplace/redhat-operators-jljqd" Jan 26 17:00:44 crc kubenswrapper[4865]: I0126 17:00:44.396820 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/44eb3feb-9970-4a11-bc32-4e0da21dc345-catalog-content\") pod \"redhat-operators-jljqd\" (UID: \"44eb3feb-9970-4a11-bc32-4e0da21dc345\") " pod="openshift-marketplace/redhat-operators-jljqd" Jan 26 17:00:44 crc kubenswrapper[4865]: I0126 17:00:44.398258 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/44eb3feb-9970-4a11-bc32-4e0da21dc345-catalog-content\") pod \"redhat-operators-jljqd\" (UID: \"44eb3feb-9970-4a11-bc32-4e0da21dc345\") " pod="openshift-marketplace/redhat-operators-jljqd" Jan 26 17:00:44 crc kubenswrapper[4865]: I0126 17:00:44.398508 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" 
(UniqueName: \"kubernetes.io/empty-dir/44eb3feb-9970-4a11-bc32-4e0da21dc345-utilities\") pod \"redhat-operators-jljqd\" (UID: \"44eb3feb-9970-4a11-bc32-4e0da21dc345\") " pod="openshift-marketplace/redhat-operators-jljqd" Jan 26 17:00:44 crc kubenswrapper[4865]: I0126 17:00:44.420377 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r7d8d\" (UniqueName: \"kubernetes.io/projected/44eb3feb-9970-4a11-bc32-4e0da21dc345-kube-api-access-r7d8d\") pod \"redhat-operators-jljqd\" (UID: \"44eb3feb-9970-4a11-bc32-4e0da21dc345\") " pod="openshift-marketplace/redhat-operators-jljqd" Jan 26 17:00:44 crc kubenswrapper[4865]: I0126 17:00:44.491175 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jljqd" Jan 26 17:00:44 crc kubenswrapper[4865]: I0126 17:00:44.619022 4865 generic.go:334] "Generic (PLEG): container finished" podID="83363318-2aa6-4dd2-b258-f369e1aa1dc5" containerID="d6c76d1ff410d1e0b37811476efa781e615697a6d745202be551434e287ac4ac" exitCode=0 Jan 26 17:00:44 crc kubenswrapper[4865]: I0126 17:00:44.619414 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lw6st" event={"ID":"83363318-2aa6-4dd2-b258-f369e1aa1dc5","Type":"ContainerDied","Data":"d6c76d1ff410d1e0b37811476efa781e615697a6d745202be551434e287ac4ac"} Jan 26 17:00:44 crc kubenswrapper[4865]: I0126 17:00:44.619449 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lw6st" event={"ID":"83363318-2aa6-4dd2-b258-f369e1aa1dc5","Type":"ContainerStarted","Data":"dfa58437a5bbf2e920e85d40011f621f9c5a35de605cb62f6031d8e45c07261e"} Jan 26 17:00:44 crc kubenswrapper[4865]: I0126 17:00:44.629446 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sjpcq" event={"ID":"97ff386d-ae5d-42d7-9e8f-60c2d88716bd","Type":"ContainerStarted","Data":"6e8b4e1f06fb959680e450d6d8cab5dafdd8fb338d6252ccbf385d12a8f12f89"} Jan 26 17:00:44 crc kubenswrapper[4865]: I0126 17:00:44.636755 4865 generic.go:334] "Generic (PLEG): container finished" podID="8e7fa29d-427b-4a11-88db-190d64b03f52" containerID="e7a759f74ce8958cb015eb80a4f85a3460da82913aac96aec2481dd6ad214297" exitCode=0 Jan 26 17:00:44 crc kubenswrapper[4865]: I0126 17:00:44.636835 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8vh55" event={"ID":"8e7fa29d-427b-4a11-88db-190d64b03f52","Type":"ContainerDied","Data":"e7a759f74ce8958cb015eb80a4f85a3460da82913aac96aec2481dd6ad214297"} Jan 26 17:00:44 crc kubenswrapper[4865]: I0126 17:00:44.997761 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jljqd"] Jan 26 17:00:45 crc kubenswrapper[4865]: I0126 17:00:45.644013 4865 generic.go:334] "Generic (PLEG): container finished" podID="44eb3feb-9970-4a11-bc32-4e0da21dc345" containerID="42e52a0eff75b05dcb9423ecf4a4574432905a23fb8984f5a375eedf5ed6f551" exitCode=0 Jan 26 17:00:45 crc kubenswrapper[4865]: I0126 17:00:45.644074 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jljqd" event={"ID":"44eb3feb-9970-4a11-bc32-4e0da21dc345","Type":"ContainerDied","Data":"42e52a0eff75b05dcb9423ecf4a4574432905a23fb8984f5a375eedf5ed6f551"} Jan 26 17:00:45 crc kubenswrapper[4865]: I0126 17:00:45.644478 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jljqd" 
event={"ID":"44eb3feb-9970-4a11-bc32-4e0da21dc345","Type":"ContainerStarted","Data":"7342ee667bc15470deb26c40567fb8418e699d0385be0b3698f72b32b82d8b42"} Jan 26 17:00:45 crc kubenswrapper[4865]: I0126 17:00:45.646753 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8vh55" event={"ID":"8e7fa29d-427b-4a11-88db-190d64b03f52","Type":"ContainerStarted","Data":"3c8bacee891a9286e210100054040e6c21d07cb2065cee678ff0c2bcfc77f43f"} Jan 26 17:00:45 crc kubenswrapper[4865]: I0126 17:00:45.650881 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lw6st" event={"ID":"83363318-2aa6-4dd2-b258-f369e1aa1dc5","Type":"ContainerStarted","Data":"4dc74e2cdbef23ca2789e9b88af9f8a17a5c003a826187e834c8f5aa4aa444e6"} Jan 26 17:00:45 crc kubenswrapper[4865]: I0126 17:00:45.653665 4865 generic.go:334] "Generic (PLEG): container finished" podID="97ff386d-ae5d-42d7-9e8f-60c2d88716bd" containerID="6e8b4e1f06fb959680e450d6d8cab5dafdd8fb338d6252ccbf385d12a8f12f89" exitCode=0 Jan 26 17:00:45 crc kubenswrapper[4865]: I0126 17:00:45.653710 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sjpcq" event={"ID":"97ff386d-ae5d-42d7-9e8f-60c2d88716bd","Type":"ContainerDied","Data":"6e8b4e1f06fb959680e450d6d8cab5dafdd8fb338d6252ccbf385d12a8f12f89"} Jan 26 17:00:45 crc kubenswrapper[4865]: I0126 17:00:45.716107 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-8vh55" podStartSLOduration=3.038666733 podStartE2EDuration="5.716088568s" podCreationTimestamp="2026-01-26 17:00:40 +0000 UTC" firstStartedPulling="2026-01-26 17:00:42.584758864 +0000 UTC m=+370.168644451" lastFinishedPulling="2026-01-26 17:00:45.262180699 +0000 UTC m=+372.846066286" observedRunningTime="2026-01-26 17:00:45.71282857 +0000 UTC m=+373.296714167" watchObservedRunningTime="2026-01-26 17:00:45.716088568 +0000 UTC m=+373.299974155" Jan 26 17:00:46 crc kubenswrapper[4865]: I0126 17:00:46.661655 4865 generic.go:334] "Generic (PLEG): container finished" podID="83363318-2aa6-4dd2-b258-f369e1aa1dc5" containerID="4dc74e2cdbef23ca2789e9b88af9f8a17a5c003a826187e834c8f5aa4aa444e6" exitCode=0 Jan 26 17:00:46 crc kubenswrapper[4865]: I0126 17:00:46.662152 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lw6st" event={"ID":"83363318-2aa6-4dd2-b258-f369e1aa1dc5","Type":"ContainerDied","Data":"4dc74e2cdbef23ca2789e9b88af9f8a17a5c003a826187e834c8f5aa4aa444e6"} Jan 26 17:00:47 crc kubenswrapper[4865]: I0126 17:00:47.552786 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" podUID="41f9cfdd-4a56-4e9a-ac7c-8b683e96d625" containerName="registry" containerID="cri-o://d3cfa5e14f941ec45b4d6bbaec8032621018739ee882910197b779b79676deb3" gracePeriod=30 Jan 26 17:00:47 crc kubenswrapper[4865]: I0126 17:00:47.672532 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jljqd" event={"ID":"44eb3feb-9970-4a11-bc32-4e0da21dc345","Type":"ContainerStarted","Data":"f88c5cb215db6c90e8a6e458ecc29b742ad3b5693c239728928253ac97506eeb"} Jan 26 17:00:47 crc kubenswrapper[4865]: I0126 17:00:47.677120 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lw6st" 
event={"ID":"83363318-2aa6-4dd2-b258-f369e1aa1dc5","Type":"ContainerStarted","Data":"706b35089d9eb9523fbce94b122898dfc7a1302740552e89d14dd88008dc0a6b"} Jan 26 17:00:47 crc kubenswrapper[4865]: I0126 17:00:47.687606 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sjpcq" event={"ID":"97ff386d-ae5d-42d7-9e8f-60c2d88716bd","Type":"ContainerStarted","Data":"9d011b80e86ad9f6c9b5e061ca29dda26f44c696bb73b34e5133031c975f5e53"} Jan 26 17:00:47 crc kubenswrapper[4865]: I0126 17:00:47.717220 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-sjpcq" podStartSLOduration=3.5465334459999998 podStartE2EDuration="6.717200824s" podCreationTimestamp="2026-01-26 17:00:41 +0000 UTC" firstStartedPulling="2026-01-26 17:00:43.600057282 +0000 UTC m=+371.183942869" lastFinishedPulling="2026-01-26 17:00:46.77072466 +0000 UTC m=+374.354610247" observedRunningTime="2026-01-26 17:00:47.716330998 +0000 UTC m=+375.300216585" watchObservedRunningTime="2026-01-26 17:00:47.717200824 +0000 UTC m=+375.301086411" Jan 26 17:00:47 crc kubenswrapper[4865]: I0126 17:00:47.739895 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-lw6st" podStartSLOduration=2.272393264 podStartE2EDuration="4.739865861s" podCreationTimestamp="2026-01-26 17:00:43 +0000 UTC" firstStartedPulling="2026-01-26 17:00:44.624284307 +0000 UTC m=+372.208169894" lastFinishedPulling="2026-01-26 17:00:47.091756904 +0000 UTC m=+374.675642491" observedRunningTime="2026-01-26 17:00:47.738403797 +0000 UTC m=+375.322289384" watchObservedRunningTime="2026-01-26 17:00:47.739865861 +0000 UTC m=+375.323751448" Jan 26 17:00:48 crc kubenswrapper[4865]: I0126 17:00:48.700685 4865 generic.go:334] "Generic (PLEG): container finished" podID="44eb3feb-9970-4a11-bc32-4e0da21dc345" containerID="f88c5cb215db6c90e8a6e458ecc29b742ad3b5693c239728928253ac97506eeb" exitCode=0 Jan 26 17:00:48 crc kubenswrapper[4865]: I0126 17:00:48.700803 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jljqd" event={"ID":"44eb3feb-9970-4a11-bc32-4e0da21dc345","Type":"ContainerDied","Data":"f88c5cb215db6c90e8a6e458ecc29b742ad3b5693c239728928253ac97506eeb"} Jan 26 17:00:48 crc kubenswrapper[4865]: I0126 17:00:48.710502 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" event={"ID":"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625","Type":"ContainerDied","Data":"d3cfa5e14f941ec45b4d6bbaec8032621018739ee882910197b779b79676deb3"} Jan 26 17:00:48 crc kubenswrapper[4865]: I0126 17:00:48.710444 4865 generic.go:334] "Generic (PLEG): container finished" podID="41f9cfdd-4a56-4e9a-ac7c-8b683e96d625" containerID="d3cfa5e14f941ec45b4d6bbaec8032621018739ee882910197b779b79676deb3" exitCode=0 Jan 26 17:00:48 crc kubenswrapper[4865]: I0126 17:00:48.710664 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" event={"ID":"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625","Type":"ContainerDied","Data":"c394ed494db06392ea6567794819c06635cbea9016883a5bbd59c0b15dd95d55"} Jan 26 17:00:48 crc kubenswrapper[4865]: I0126 17:00:48.710683 4865 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c394ed494db06392ea6567794819c06635cbea9016883a5bbd59c0b15dd95d55" Jan 26 17:00:49 crc kubenswrapper[4865]: I0126 17:00:49.303765 4865 util.go:48] "No ready sandbox for 
pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 17:00:49 crc kubenswrapper[4865]: I0126 17:00:49.490725 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/41f9cfdd-4a56-4e9a-ac7c-8b683e96d625-bound-sa-token\") pod \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " Jan 26 17:00:49 crc kubenswrapper[4865]: I0126 17:00:49.490799 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p8lww\" (UniqueName: \"kubernetes.io/projected/41f9cfdd-4a56-4e9a-ac7c-8b683e96d625-kube-api-access-p8lww\") pod \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " Jan 26 17:00:49 crc kubenswrapper[4865]: I0126 17:00:49.490862 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/41f9cfdd-4a56-4e9a-ac7c-8b683e96d625-ca-trust-extracted\") pod \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " Jan 26 17:00:49 crc kubenswrapper[4865]: I0126 17:00:49.490896 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/41f9cfdd-4a56-4e9a-ac7c-8b683e96d625-trusted-ca\") pod \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " Jan 26 17:00:49 crc kubenswrapper[4865]: I0126 17:00:49.490913 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/41f9cfdd-4a56-4e9a-ac7c-8b683e96d625-installation-pull-secrets\") pod \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " Jan 26 17:00:49 crc kubenswrapper[4865]: I0126 17:00:49.490947 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/41f9cfdd-4a56-4e9a-ac7c-8b683e96d625-registry-certificates\") pod \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " Jan 26 17:00:49 crc kubenswrapper[4865]: I0126 17:00:49.491123 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " Jan 26 17:00:49 crc kubenswrapper[4865]: I0126 17:00:49.491148 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/41f9cfdd-4a56-4e9a-ac7c-8b683e96d625-registry-tls\") pod \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\" (UID: \"41f9cfdd-4a56-4e9a-ac7c-8b683e96d625\") " Jan 26 17:00:49 crc kubenswrapper[4865]: I0126 17:00:49.494088 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/41f9cfdd-4a56-4e9a-ac7c-8b683e96d625-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 17:00:49 crc kubenswrapper[4865]: I0126 17:00:49.494364 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/41f9cfdd-4a56-4e9a-ac7c-8b683e96d625-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 17:00:49 crc kubenswrapper[4865]: I0126 17:00:49.512899 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41f9cfdd-4a56-4e9a-ac7c-8b683e96d625-kube-api-access-p8lww" (OuterVolumeSpecName: "kube-api-access-p8lww") pod "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625"). InnerVolumeSpecName "kube-api-access-p8lww". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 17:00:49 crc kubenswrapper[4865]: I0126 17:00:49.514871 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41f9cfdd-4a56-4e9a-ac7c-8b683e96d625-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 17:00:49 crc kubenswrapper[4865]: I0126 17:00:49.516561 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41f9cfdd-4a56-4e9a-ac7c-8b683e96d625-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 17:00:49 crc kubenswrapper[4865]: I0126 17:00:49.520236 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41f9cfdd-4a56-4e9a-ac7c-8b683e96d625-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 17:00:49 crc kubenswrapper[4865]: I0126 17:00:49.523382 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/41f9cfdd-4a56-4e9a-ac7c-8b683e96d625-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 17:00:49 crc kubenswrapper[4865]: I0126 17:00:49.592673 4865 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/41f9cfdd-4a56-4e9a-ac7c-8b683e96d625-registry-tls\") on node \"crc\" DevicePath \"\"" Jan 26 17:00:49 crc kubenswrapper[4865]: I0126 17:00:49.592911 4865 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/41f9cfdd-4a56-4e9a-ac7c-8b683e96d625-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 26 17:00:49 crc kubenswrapper[4865]: I0126 17:00:49.592922 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p8lww\" (UniqueName: \"kubernetes.io/projected/41f9cfdd-4a56-4e9a-ac7c-8b683e96d625-kube-api-access-p8lww\") on node \"crc\" DevicePath \"\"" Jan 26 17:00:49 crc kubenswrapper[4865]: I0126 17:00:49.592932 4865 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/41f9cfdd-4a56-4e9a-ac7c-8b683e96d625-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jan 26 17:00:49 crc kubenswrapper[4865]: I0126 17:00:49.592942 4865 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/41f9cfdd-4a56-4e9a-ac7c-8b683e96d625-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 26 17:00:49 crc kubenswrapper[4865]: I0126 17:00:49.592950 4865 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/41f9cfdd-4a56-4e9a-ac7c-8b683e96d625-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Jan 26 17:00:49 crc kubenswrapper[4865]: I0126 17:00:49.592957 4865 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/41f9cfdd-4a56-4e9a-ac7c-8b683e96d625-registry-certificates\") on node \"crc\" DevicePath \"\"" Jan 26 17:00:49 crc kubenswrapper[4865]: I0126 17:00:49.627543 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625" (UID: "41f9cfdd-4a56-4e9a-ac7c-8b683e96d625"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 26 17:00:49 crc kubenswrapper[4865]: I0126 17:00:49.716352 4865 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" Jan 26 17:00:49 crc kubenswrapper[4865]: I0126 17:00:49.749272 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-gfbfj"] Jan 26 17:00:49 crc kubenswrapper[4865]: I0126 17:00:49.755417 4865 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-gfbfj"] Jan 26 17:00:50 crc kubenswrapper[4865]: I0126 17:00:50.371060 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="41f9cfdd-4a56-4e9a-ac7c-8b683e96d625" path="/var/lib/kubelet/pods/41f9cfdd-4a56-4e9a-ac7c-8b683e96d625/volumes" Jan 26 17:00:50 crc kubenswrapper[4865]: I0126 17:00:50.755926 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jljqd" event={"ID":"44eb3feb-9970-4a11-bc32-4e0da21dc345","Type":"ContainerStarted","Data":"a54a503f58bd721f294c28d8f9958920e7b131c4d3fb9103783e19a66cc2cec2"} Jan 26 17:00:50 crc kubenswrapper[4865]: I0126 17:00:50.780472 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-jljqd" podStartSLOduration=2.111535213 podStartE2EDuration="6.780450916s" podCreationTimestamp="2026-01-26 17:00:44 +0000 UTC" firstStartedPulling="2026-01-26 17:00:45.645513591 +0000 UTC m=+373.229399178" lastFinishedPulling="2026-01-26 17:00:50.314429294 +0000 UTC m=+377.898314881" observedRunningTime="2026-01-26 17:00:50.775811947 +0000 UTC m=+378.359697534" watchObservedRunningTime="2026-01-26 17:00:50.780450916 +0000 UTC m=+378.364336503" Jan 26 17:00:51 crc kubenswrapper[4865]: I0126 17:00:51.292616 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-8vh55" Jan 26 17:00:51 crc kubenswrapper[4865]: I0126 17:00:51.293485 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-8vh55" Jan 26 17:00:51 crc kubenswrapper[4865]: I0126 17:00:51.340908 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-8vh55" Jan 26 17:00:51 crc kubenswrapper[4865]: I0126 17:00:51.879355 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-8vh55" Jan 26 17:00:51 crc kubenswrapper[4865]: I0126 17:00:51.891392 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-sjpcq" Jan 26 17:00:51 crc kubenswrapper[4865]: I0126 17:00:51.891674 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-sjpcq" Jan 26 17:00:51 crc kubenswrapper[4865]: I0126 17:00:51.943983 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-sjpcq" Jan 26 17:00:52 crc kubenswrapper[4865]: I0126 17:00:52.811703 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-sjpcq" Jan 26 17:00:53 crc kubenswrapper[4865]: I0126 17:00:53.774791 4865 patch_prober.go:28] interesting pod/image-registry-697d97f7c8-gfbfj container/registry namespace/openshift-image-registry: Readiness probe status=failure output="Get \"https://10.217.0.7:5000/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" 
start-of-body= Jan 26 17:00:53 crc kubenswrapper[4865]: I0126 17:00:53.774894 4865 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-image-registry/image-registry-697d97f7c8-gfbfj" podUID="41f9cfdd-4a56-4e9a-ac7c-8b683e96d625" containerName="registry" probeResult="failure" output="Get \"https://10.217.0.7:5000/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 26 17:00:53 crc kubenswrapper[4865]: I0126 17:00:53.928944 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-lw6st" Jan 26 17:00:53 crc kubenswrapper[4865]: I0126 17:00:53.929646 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-lw6st" Jan 26 17:00:53 crc kubenswrapper[4865]: I0126 17:00:53.979161 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-lw6st" Jan 26 17:00:54 crc kubenswrapper[4865]: I0126 17:00:54.491697 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-jljqd" Jan 26 17:00:54 crc kubenswrapper[4865]: I0126 17:00:54.491782 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-jljqd" Jan 26 17:00:54 crc kubenswrapper[4865]: I0126 17:00:54.845173 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-lw6st" Jan 26 17:00:55 crc kubenswrapper[4865]: I0126 17:00:55.545981 4865 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-jljqd" podUID="44eb3feb-9970-4a11-bc32-4e0da21dc345" containerName="registry-server" probeResult="failure" output=< Jan 26 17:00:55 crc kubenswrapper[4865]: timeout: failed to connect service ":50051" within 1s Jan 26 17:00:55 crc kubenswrapper[4865]: > Jan 26 17:01:00 crc kubenswrapper[4865]: I0126 17:01:00.057412 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7bdc655bf5-8gwtt"] Jan 26 17:01:00 crc kubenswrapper[4865]: I0126 17:01:00.058519 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-7bdc655bf5-8gwtt" podUID="85195773-149b-4dcc-a3cc-3d7c5f063e1d" containerName="controller-manager" containerID="cri-o://313901d12b4ee1fa0351e017156b97ddad74bdba716258e1a362577a3f752362" gracePeriod=30 Jan 26 17:01:00 crc kubenswrapper[4865]: I0126 17:01:00.853684 4865 generic.go:334] "Generic (PLEG): container finished" podID="85195773-149b-4dcc-a3cc-3d7c5f063e1d" containerID="313901d12b4ee1fa0351e017156b97ddad74bdba716258e1a362577a3f752362" exitCode=0 Jan 26 17:01:00 crc kubenswrapper[4865]: I0126 17:01:00.853765 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7bdc655bf5-8gwtt" event={"ID":"85195773-149b-4dcc-a3cc-3d7c5f063e1d","Type":"ContainerDied","Data":"313901d12b4ee1fa0351e017156b97ddad74bdba716258e1a362577a3f752362"} Jan 26 17:01:02 crc kubenswrapper[4865]: I0126 17:01:02.213282 4865 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7bdc655bf5-8gwtt" Jan 26 17:01:02 crc kubenswrapper[4865]: I0126 17:01:02.240805 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-6d75f47c5c-w7dsk"] Jan 26 17:01:02 crc kubenswrapper[4865]: E0126 17:01:02.241130 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85195773-149b-4dcc-a3cc-3d7c5f063e1d" containerName="controller-manager" Jan 26 17:01:02 crc kubenswrapper[4865]: I0126 17:01:02.241149 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="85195773-149b-4dcc-a3cc-3d7c5f063e1d" containerName="controller-manager" Jan 26 17:01:02 crc kubenswrapper[4865]: E0126 17:01:02.241167 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41f9cfdd-4a56-4e9a-ac7c-8b683e96d625" containerName="registry" Jan 26 17:01:02 crc kubenswrapper[4865]: I0126 17:01:02.241177 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="41f9cfdd-4a56-4e9a-ac7c-8b683e96d625" containerName="registry" Jan 26 17:01:02 crc kubenswrapper[4865]: I0126 17:01:02.241294 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="41f9cfdd-4a56-4e9a-ac7c-8b683e96d625" containerName="registry" Jan 26 17:01:02 crc kubenswrapper[4865]: I0126 17:01:02.241317 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="85195773-149b-4dcc-a3cc-3d7c5f063e1d" containerName="controller-manager" Jan 26 17:01:02 crc kubenswrapper[4865]: I0126 17:01:02.241698 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6d75f47c5c-w7dsk" Jan 26 17:01:02 crc kubenswrapper[4865]: I0126 17:01:02.257084 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6d75f47c5c-w7dsk"] Jan 26 17:01:02 crc kubenswrapper[4865]: I0126 17:01:02.294208 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d66834fe-e2e4-4aa5-b5bb-cd269a57e63a-serving-cert\") pod \"controller-manager-6d75f47c5c-w7dsk\" (UID: \"d66834fe-e2e4-4aa5-b5bb-cd269a57e63a\") " pod="openshift-controller-manager/controller-manager-6d75f47c5c-w7dsk" Jan 26 17:01:02 crc kubenswrapper[4865]: I0126 17:01:02.294337 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d66834fe-e2e4-4aa5-b5bb-cd269a57e63a-config\") pod \"controller-manager-6d75f47c5c-w7dsk\" (UID: \"d66834fe-e2e4-4aa5-b5bb-cd269a57e63a\") " pod="openshift-controller-manager/controller-manager-6d75f47c5c-w7dsk" Jan 26 17:01:02 crc kubenswrapper[4865]: I0126 17:01:02.294431 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d66834fe-e2e4-4aa5-b5bb-cd269a57e63a-proxy-ca-bundles\") pod \"controller-manager-6d75f47c5c-w7dsk\" (UID: \"d66834fe-e2e4-4aa5-b5bb-cd269a57e63a\") " pod="openshift-controller-manager/controller-manager-6d75f47c5c-w7dsk" Jan 26 17:01:02 crc kubenswrapper[4865]: I0126 17:01:02.294473 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44tts\" (UniqueName: \"kubernetes.io/projected/d66834fe-e2e4-4aa5-b5bb-cd269a57e63a-kube-api-access-44tts\") pod \"controller-manager-6d75f47c5c-w7dsk\" (UID: 
\"d66834fe-e2e4-4aa5-b5bb-cd269a57e63a\") " pod="openshift-controller-manager/controller-manager-6d75f47c5c-w7dsk" Jan 26 17:01:02 crc kubenswrapper[4865]: I0126 17:01:02.294606 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d66834fe-e2e4-4aa5-b5bb-cd269a57e63a-client-ca\") pod \"controller-manager-6d75f47c5c-w7dsk\" (UID: \"d66834fe-e2e4-4aa5-b5bb-cd269a57e63a\") " pod="openshift-controller-manager/controller-manager-6d75f47c5c-w7dsk" Jan 26 17:01:02 crc kubenswrapper[4865]: I0126 17:01:02.401930 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85195773-149b-4dcc-a3cc-3d7c5f063e1d-config\") pod \"85195773-149b-4dcc-a3cc-3d7c5f063e1d\" (UID: \"85195773-149b-4dcc-a3cc-3d7c5f063e1d\") " Jan 26 17:01:02 crc kubenswrapper[4865]: I0126 17:01:02.402030 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/85195773-149b-4dcc-a3cc-3d7c5f063e1d-serving-cert\") pod \"85195773-149b-4dcc-a3cc-3d7c5f063e1d\" (UID: \"85195773-149b-4dcc-a3cc-3d7c5f063e1d\") " Jan 26 17:01:02 crc kubenswrapper[4865]: I0126 17:01:02.402065 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/85195773-149b-4dcc-a3cc-3d7c5f063e1d-proxy-ca-bundles\") pod \"85195773-149b-4dcc-a3cc-3d7c5f063e1d\" (UID: \"85195773-149b-4dcc-a3cc-3d7c5f063e1d\") " Jan 26 17:01:02 crc kubenswrapper[4865]: I0126 17:01:02.402139 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/85195773-149b-4dcc-a3cc-3d7c5f063e1d-client-ca\") pod \"85195773-149b-4dcc-a3cc-3d7c5f063e1d\" (UID: \"85195773-149b-4dcc-a3cc-3d7c5f063e1d\") " Jan 26 17:01:02 crc kubenswrapper[4865]: I0126 17:01:02.402188 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wnb7x\" (UniqueName: \"kubernetes.io/projected/85195773-149b-4dcc-a3cc-3d7c5f063e1d-kube-api-access-wnb7x\") pod \"85195773-149b-4dcc-a3cc-3d7c5f063e1d\" (UID: \"85195773-149b-4dcc-a3cc-3d7c5f063e1d\") " Jan 26 17:01:02 crc kubenswrapper[4865]: I0126 17:01:02.402572 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d66834fe-e2e4-4aa5-b5bb-cd269a57e63a-config\") pod \"controller-manager-6d75f47c5c-w7dsk\" (UID: \"d66834fe-e2e4-4aa5-b5bb-cd269a57e63a\") " pod="openshift-controller-manager/controller-manager-6d75f47c5c-w7dsk" Jan 26 17:01:02 crc kubenswrapper[4865]: I0126 17:01:02.402674 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d66834fe-e2e4-4aa5-b5bb-cd269a57e63a-proxy-ca-bundles\") pod \"controller-manager-6d75f47c5c-w7dsk\" (UID: \"d66834fe-e2e4-4aa5-b5bb-cd269a57e63a\") " pod="openshift-controller-manager/controller-manager-6d75f47c5c-w7dsk" Jan 26 17:01:02 crc kubenswrapper[4865]: I0126 17:01:02.402708 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44tts\" (UniqueName: \"kubernetes.io/projected/d66834fe-e2e4-4aa5-b5bb-cd269a57e63a-kube-api-access-44tts\") pod \"controller-manager-6d75f47c5c-w7dsk\" (UID: \"d66834fe-e2e4-4aa5-b5bb-cd269a57e63a\") " 
pod="openshift-controller-manager/controller-manager-6d75f47c5c-w7dsk" Jan 26 17:01:02 crc kubenswrapper[4865]: I0126 17:01:02.402739 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d66834fe-e2e4-4aa5-b5bb-cd269a57e63a-client-ca\") pod \"controller-manager-6d75f47c5c-w7dsk\" (UID: \"d66834fe-e2e4-4aa5-b5bb-cd269a57e63a\") " pod="openshift-controller-manager/controller-manager-6d75f47c5c-w7dsk" Jan 26 17:01:02 crc kubenswrapper[4865]: I0126 17:01:02.402809 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d66834fe-e2e4-4aa5-b5bb-cd269a57e63a-serving-cert\") pod \"controller-manager-6d75f47c5c-w7dsk\" (UID: \"d66834fe-e2e4-4aa5-b5bb-cd269a57e63a\") " pod="openshift-controller-manager/controller-manager-6d75f47c5c-w7dsk" Jan 26 17:01:02 crc kubenswrapper[4865]: I0126 17:01:02.411850 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d66834fe-e2e4-4aa5-b5bb-cd269a57e63a-proxy-ca-bundles\") pod \"controller-manager-6d75f47c5c-w7dsk\" (UID: \"d66834fe-e2e4-4aa5-b5bb-cd269a57e63a\") " pod="openshift-controller-manager/controller-manager-6d75f47c5c-w7dsk" Jan 26 17:01:02 crc kubenswrapper[4865]: I0126 17:01:02.412672 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d66834fe-e2e4-4aa5-b5bb-cd269a57e63a-client-ca\") pod \"controller-manager-6d75f47c5c-w7dsk\" (UID: \"d66834fe-e2e4-4aa5-b5bb-cd269a57e63a\") " pod="openshift-controller-manager/controller-manager-6d75f47c5c-w7dsk" Jan 26 17:01:02 crc kubenswrapper[4865]: I0126 17:01:02.412889 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85195773-149b-4dcc-a3cc-3d7c5f063e1d-kube-api-access-wnb7x" (OuterVolumeSpecName: "kube-api-access-wnb7x") pod "85195773-149b-4dcc-a3cc-3d7c5f063e1d" (UID: "85195773-149b-4dcc-a3cc-3d7c5f063e1d"). InnerVolumeSpecName "kube-api-access-wnb7x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 17:01:02 crc kubenswrapper[4865]: I0126 17:01:02.413365 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/85195773-149b-4dcc-a3cc-3d7c5f063e1d-client-ca" (OuterVolumeSpecName: "client-ca") pod "85195773-149b-4dcc-a3cc-3d7c5f063e1d" (UID: "85195773-149b-4dcc-a3cc-3d7c5f063e1d"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 17:01:02 crc kubenswrapper[4865]: I0126 17:01:02.413445 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/85195773-149b-4dcc-a3cc-3d7c5f063e1d-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "85195773-149b-4dcc-a3cc-3d7c5f063e1d" (UID: "85195773-149b-4dcc-a3cc-3d7c5f063e1d"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 17:01:02 crc kubenswrapper[4865]: I0126 17:01:02.413629 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/85195773-149b-4dcc-a3cc-3d7c5f063e1d-config" (OuterVolumeSpecName: "config") pod "85195773-149b-4dcc-a3cc-3d7c5f063e1d" (UID: "85195773-149b-4dcc-a3cc-3d7c5f063e1d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 17:01:02 crc kubenswrapper[4865]: I0126 17:01:02.413976 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85195773-149b-4dcc-a3cc-3d7c5f063e1d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "85195773-149b-4dcc-a3cc-3d7c5f063e1d" (UID: "85195773-149b-4dcc-a3cc-3d7c5f063e1d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 17:01:02 crc kubenswrapper[4865]: I0126 17:01:02.414243 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d66834fe-e2e4-4aa5-b5bb-cd269a57e63a-config\") pod \"controller-manager-6d75f47c5c-w7dsk\" (UID: \"d66834fe-e2e4-4aa5-b5bb-cd269a57e63a\") " pod="openshift-controller-manager/controller-manager-6d75f47c5c-w7dsk" Jan 26 17:01:02 crc kubenswrapper[4865]: I0126 17:01:02.414664 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d66834fe-e2e4-4aa5-b5bb-cd269a57e63a-serving-cert\") pod \"controller-manager-6d75f47c5c-w7dsk\" (UID: \"d66834fe-e2e4-4aa5-b5bb-cd269a57e63a\") " pod="openshift-controller-manager/controller-manager-6d75f47c5c-w7dsk" Jan 26 17:01:02 crc kubenswrapper[4865]: I0126 17:01:02.430701 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-44tts\" (UniqueName: \"kubernetes.io/projected/d66834fe-e2e4-4aa5-b5bb-cd269a57e63a-kube-api-access-44tts\") pod \"controller-manager-6d75f47c5c-w7dsk\" (UID: \"d66834fe-e2e4-4aa5-b5bb-cd269a57e63a\") " pod="openshift-controller-manager/controller-manager-6d75f47c5c-w7dsk" Jan 26 17:01:02 crc kubenswrapper[4865]: I0126 17:01:02.503916 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wnb7x\" (UniqueName: \"kubernetes.io/projected/85195773-149b-4dcc-a3cc-3d7c5f063e1d-kube-api-access-wnb7x\") on node \"crc\" DevicePath \"\"" Jan 26 17:01:02 crc kubenswrapper[4865]: I0126 17:01:02.503976 4865 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85195773-149b-4dcc-a3cc-3d7c5f063e1d-config\") on node \"crc\" DevicePath \"\"" Jan 26 17:01:02 crc kubenswrapper[4865]: I0126 17:01:02.504057 4865 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/85195773-149b-4dcc-a3cc-3d7c5f063e1d-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 26 17:01:02 crc kubenswrapper[4865]: I0126 17:01:02.504072 4865 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/85195773-149b-4dcc-a3cc-3d7c5f063e1d-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 17:01:02 crc kubenswrapper[4865]: I0126 17:01:02.504084 4865 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/85195773-149b-4dcc-a3cc-3d7c5f063e1d-client-ca\") on node \"crc\" DevicePath \"\"" Jan 26 17:01:02 crc kubenswrapper[4865]: I0126 17:01:02.564679 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-6d75f47c5c-w7dsk"
Jan 26 17:01:02 crc kubenswrapper[4865]: I0126 17:01:02.817073 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6d75f47c5c-w7dsk"]
Jan 26 17:01:02 crc kubenswrapper[4865]: I0126 17:01:02.872133 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6d75f47c5c-w7dsk" event={"ID":"d66834fe-e2e4-4aa5-b5bb-cd269a57e63a","Type":"ContainerStarted","Data":"0a1ecc4284df900d307914a5376fe385521fab6c2a6f88141e7e9cfc4de494c4"}
Jan 26 17:01:02 crc kubenswrapper[4865]: I0126 17:01:02.879605 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7bdc655bf5-8gwtt" event={"ID":"85195773-149b-4dcc-a3cc-3d7c5f063e1d","Type":"ContainerDied","Data":"43daefb74a0e4b92f3c56548fcf2103acd89f6651d89a080998a705a15e91502"}
Jan 26 17:01:02 crc kubenswrapper[4865]: I0126 17:01:02.879675 4865 scope.go:117] "RemoveContainer" containerID="313901d12b4ee1fa0351e017156b97ddad74bdba716258e1a362577a3f752362"
Jan 26 17:01:02 crc kubenswrapper[4865]: I0126 17:01:02.879855 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7bdc655bf5-8gwtt"
Jan 26 17:01:02 crc kubenswrapper[4865]: I0126 17:01:02.942576 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7bdc655bf5-8gwtt"]
Jan 26 17:01:02 crc kubenswrapper[4865]: I0126 17:01:02.946733 4865 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-7bdc655bf5-8gwtt"]
Jan 26 17:01:03 crc kubenswrapper[4865]: I0126 17:01:03.891463 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6d75f47c5c-w7dsk" event={"ID":"d66834fe-e2e4-4aa5-b5bb-cd269a57e63a","Type":"ContainerStarted","Data":"662428e853652f4b81abc973a743acb65676b1bc13957e4a339914d86fa415cc"}
Jan 26 17:01:03 crc kubenswrapper[4865]: I0126 17:01:03.892039 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-6d75f47c5c-w7dsk"
Jan 26 17:01:03 crc kubenswrapper[4865]: I0126 17:01:03.899835 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-6d75f47c5c-w7dsk"
Jan 26 17:01:03 crc kubenswrapper[4865]: I0126 17:01:03.932451 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-6d75f47c5c-w7dsk" podStartSLOduration=3.932406877 podStartE2EDuration="3.932406877s" podCreationTimestamp="2026-01-26 17:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 17:01:03.920285786 +0000 UTC m=+391.504171373" watchObservedRunningTime="2026-01-26 17:01:03.932406877 +0000 UTC m=+391.516292474"
Jan 26 17:01:04 crc kubenswrapper[4865]: I0126 17:01:04.366770 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85195773-149b-4dcc-a3cc-3d7c5f063e1d" path="/var/lib/kubelet/pods/85195773-149b-4dcc-a3cc-3d7c5f063e1d/volumes"
Jan 26 17:01:04 crc kubenswrapper[4865]: I0126 17:01:04.512381 4865 patch_prober.go:28] interesting pod/machine-config-daemon-q8cb9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 26 17:01:04 crc kubenswrapper[4865]: I0126 17:01:04.513228 4865 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 26 17:01:04 crc kubenswrapper[4865]: I0126 17:01:04.552904 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-jljqd"
Jan 26 17:01:04 crc kubenswrapper[4865]: I0126 17:01:04.609396 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-jljqd"
Jan 26 17:01:34 crc kubenswrapper[4865]: I0126 17:01:34.330077 4865 scope.go:117] "RemoveContainer" containerID="64e455c50925f7059dcc38a24cda814175218f1d0b153b821e8003f796083769"
Jan 26 17:01:34 crc kubenswrapper[4865]: I0126 17:01:34.402881 4865 scope.go:117] "RemoveContainer" containerID="19ade21170351b101133d27144a53a9b6258d309678c998d7e13a020f5f7f45c"
Jan 26 17:01:34 crc kubenswrapper[4865]: I0126 17:01:34.422155 4865 scope.go:117] "RemoveContainer" containerID="cbff43229de7b90f91ab14825b06a15f40006b223487cb26c757bc8352f97363"
Jan 26 17:01:34 crc kubenswrapper[4865]: I0126 17:01:34.448452 4865 scope.go:117] "RemoveContainer" containerID="a4869b65b689e525c24f982ee0b79fbc44a928a51bae76dc24e89112c1720be1"
Jan 26 17:01:34 crc kubenswrapper[4865]: I0126 17:01:34.464686 4865 scope.go:117] "RemoveContainer" containerID="0193be24b211dc7102a17b2ee15121c21ce981c0cd8fc57ac372d7ec9f6f4d06"
Jan 26 17:01:34 crc kubenswrapper[4865]: I0126 17:01:34.484409 4865 scope.go:117] "RemoveContainer" containerID="d5610d76387528c6d73e03566f9acf4b744b79f4a39119d35d23b8b2dcb385f6"
Jan 26 17:01:34 crc kubenswrapper[4865]: I0126 17:01:34.511924 4865 patch_prober.go:28] interesting pod/machine-config-daemon-q8cb9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 26 17:01:34 crc kubenswrapper[4865]: I0126 17:01:34.512024 4865 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 26 17:01:34 crc kubenswrapper[4865]: I0126 17:01:34.512077 4865 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9"
Jan 26 17:01:34 crc kubenswrapper[4865]: I0126 17:01:34.512727 4865 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"bf7daacd9f18ffbc0209bc0be892797ce420eff313886eadad483fffc218b621"} pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 26 17:01:34 crc kubenswrapper[4865]: I0126 17:01:34.512789 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" containerName="machine-config-daemon" containerID="cri-o://bf7daacd9f18ffbc0209bc0be892797ce420eff313886eadad483fffc218b621" gracePeriod=600
Jan 26 17:01:36 crc kubenswrapper[4865]: I0126 17:01:36.111225 4865 generic.go:334] "Generic (PLEG): container finished" podID="0ddedab5-2528-4881-9251-9ba5334aea61" containerID="bf7daacd9f18ffbc0209bc0be892797ce420eff313886eadad483fffc218b621" exitCode=0
Jan 26 17:01:36 crc kubenswrapper[4865]: I0126 17:01:36.111348 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" event={"ID":"0ddedab5-2528-4881-9251-9ba5334aea61","Type":"ContainerDied","Data":"bf7daacd9f18ffbc0209bc0be892797ce420eff313886eadad483fffc218b621"}
Jan 26 17:01:36 crc kubenswrapper[4865]: I0126 17:01:36.111826 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" event={"ID":"0ddedab5-2528-4881-9251-9ba5334aea61","Type":"ContainerStarted","Data":"f35ef80fd10ecec7df582c14f10449457009b396840776f5815d777acb4dfbec"}
Jan 26 17:01:36 crc kubenswrapper[4865]: I0126 17:01:36.111885 4865 scope.go:117] "RemoveContainer" containerID="79930d9ec5ae2071a02e2417508458bd2f1b5693205ae0ad3ee46c5a3a31493b"
Jan 26 17:03:34 crc kubenswrapper[4865]: I0126 17:03:34.650116 4865 scope.go:117] "RemoveContainer" containerID="d3cfa5e14f941ec45b4d6bbaec8032621018739ee882910197b779b79676deb3"
Jan 26 17:04:04 crc kubenswrapper[4865]: I0126 17:04:04.512175 4865 patch_prober.go:28] interesting pod/machine-config-daemon-q8cb9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 26 17:04:04 crc kubenswrapper[4865]: I0126 17:04:04.512885 4865 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 26 17:04:34 crc kubenswrapper[4865]: I0126 17:04:34.512047 4865 patch_prober.go:28] interesting pod/machine-config-daemon-q8cb9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 26 17:04:34 crc kubenswrapper[4865]: I0126 17:04:34.512654 4865 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 26 17:05:04 crc kubenswrapper[4865]: I0126 17:05:04.512809 4865 patch_prober.go:28] interesting pod/machine-config-daemon-q8cb9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 26 17:05:04 crc kubenswrapper[4865]: I0126 17:05:04.513585 4865 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 26 17:05:04 crc kubenswrapper[4865]: I0126 17:05:04.513722 4865 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9"
Jan 26 17:05:04 crc kubenswrapper[4865]: I0126 17:05:04.514449 4865 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f35ef80fd10ecec7df582c14f10449457009b396840776f5815d777acb4dfbec"} pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 26 17:05:04 crc kubenswrapper[4865]: I0126 17:05:04.514511 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" containerName="machine-config-daemon" containerID="cri-o://f35ef80fd10ecec7df582c14f10449457009b396840776f5815d777acb4dfbec" gracePeriod=600
Jan 26 17:05:05 crc kubenswrapper[4865]: I0126 17:05:05.443886 4865 generic.go:334] "Generic (PLEG): container finished" podID="0ddedab5-2528-4881-9251-9ba5334aea61" containerID="f35ef80fd10ecec7df582c14f10449457009b396840776f5815d777acb4dfbec" exitCode=0
Jan 26 17:05:05 crc kubenswrapper[4865]: I0126 17:05:05.443957 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" event={"ID":"0ddedab5-2528-4881-9251-9ba5334aea61","Type":"ContainerDied","Data":"f35ef80fd10ecec7df582c14f10449457009b396840776f5815d777acb4dfbec"}
Jan 26 17:05:05 crc kubenswrapper[4865]: I0126 17:05:05.444299 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" event={"ID":"0ddedab5-2528-4881-9251-9ba5334aea61","Type":"ContainerStarted","Data":"31e35f2ae088b8909c6af16afb0dd4198f471cee550ac52a7df7e61577a7b722"}
Jan 26 17:05:05 crc kubenswrapper[4865]: I0126 17:05:05.444321 4865 scope.go:117] "RemoveContainer" containerID="bf7daacd9f18ffbc0209bc0be892797ce420eff313886eadad483fffc218b621"
Jan 26 17:07:04 crc kubenswrapper[4865]: I0126 17:07:04.511923 4865 patch_prober.go:28] interesting pod/machine-config-daemon-q8cb9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 26 17:07:04 crc kubenswrapper[4865]: I0126 17:07:04.512866 4865 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 26 17:07:23 crc kubenswrapper[4865]: I0126 17:07:23.347077 4865 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Jan 26 17:07:34 crc kubenswrapper[4865]: I0126 17:07:34.512633 4865 patch_prober.go:28] interesting pod/machine-config-daemon-q8cb9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 26 17:07:34 crc kubenswrapper[4865]: I0126 17:07:34.513344 4865 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 26 17:08:04 crc kubenswrapper[4865]: I0126 17:08:04.512087 4865 patch_prober.go:28] interesting pod/machine-config-daemon-q8cb9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 26 17:08:04 crc kubenswrapper[4865]: I0126 17:08:04.512917 4865 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 26 17:08:04 crc kubenswrapper[4865]: I0126 17:08:04.512973 4865 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9"
Jan 26 17:08:04 crc kubenswrapper[4865]: I0126 17:08:04.513749 4865 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"31e35f2ae088b8909c6af16afb0dd4198f471cee550ac52a7df7e61577a7b722"} pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 26 17:08:04 crc kubenswrapper[4865]: I0126 17:08:04.513813 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" containerName="machine-config-daemon" containerID="cri-o://31e35f2ae088b8909c6af16afb0dd4198f471cee550ac52a7df7e61577a7b722" gracePeriod=600
Jan 26 17:08:05 crc kubenswrapper[4865]: I0126 17:08:05.061428 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" event={"ID":"0ddedab5-2528-4881-9251-9ba5334aea61","Type":"ContainerDied","Data":"31e35f2ae088b8909c6af16afb0dd4198f471cee550ac52a7df7e61577a7b722"}
Jan 26 17:08:05 crc kubenswrapper[4865]: I0126 17:08:05.061362 4865 generic.go:334] "Generic (PLEG): container finished" podID="0ddedab5-2528-4881-9251-9ba5334aea61" containerID="31e35f2ae088b8909c6af16afb0dd4198f471cee550ac52a7df7e61577a7b722" exitCode=0
Jan 26 17:08:05 crc kubenswrapper[4865]: I0126 17:08:05.062066 4865 scope.go:117] "RemoveContainer" containerID="f35ef80fd10ecec7df582c14f10449457009b396840776f5815d777acb4dfbec"
Jan 26 17:08:06 crc kubenswrapper[4865]: I0126 17:08:06.071982 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" event={"ID":"0ddedab5-2528-4881-9251-9ba5334aea61","Type":"ContainerStarted","Data":"75da2701546c4de83d6fc2fed2ae87c37fb18a0f0c6813145dc77a86aa6e728c"}
Jan 26 17:08:33 crc kubenswrapper[4865]: I0126 17:08:33.959876 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71347bcd"]
Jan 26 17:08:33 crc kubenswrapper[4865]: I0126 17:08:33.961802 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71347bcd"
Jan 26 17:08:33 crc kubenswrapper[4865]: I0126 17:08:33.964511 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Jan 26 17:08:33 crc kubenswrapper[4865]: I0126 17:08:33.970483 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71347bcd"]
Jan 26 17:08:34 crc kubenswrapper[4865]: I0126 17:08:34.028824 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e436f3eb-958b-4b84-8e18-a2764bfaa3a5-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71347bcd\" (UID: \"e436f3eb-958b-4b84-8e18-a2764bfaa3a5\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71347bcd"
Jan 26 17:08:34 crc kubenswrapper[4865]: I0126 17:08:34.028930 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e436f3eb-958b-4b84-8e18-a2764bfaa3a5-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71347bcd\" (UID: \"e436f3eb-958b-4b84-8e18-a2764bfaa3a5\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71347bcd"
Jan 26 17:08:34 crc kubenswrapper[4865]: I0126 17:08:34.029049 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rmwfs\" (UniqueName: \"kubernetes.io/projected/e436f3eb-958b-4b84-8e18-a2764bfaa3a5-kube-api-access-rmwfs\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71347bcd\" (UID: \"e436f3eb-958b-4b84-8e18-a2764bfaa3a5\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71347bcd"
Jan 26 17:08:34 crc kubenswrapper[4865]: I0126 17:08:34.129874 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e436f3eb-958b-4b84-8e18-a2764bfaa3a5-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71347bcd\" (UID: \"e436f3eb-958b-4b84-8e18-a2764bfaa3a5\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71347bcd"
Jan 26 17:08:34 crc kubenswrapper[4865]: I0126 17:08:34.129941 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e436f3eb-958b-4b84-8e18-a2764bfaa3a5-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71347bcd\" (UID: \"e436f3eb-958b-4b84-8e18-a2764bfaa3a5\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71347bcd"
Jan 26 17:08:34 crc kubenswrapper[4865]: I0126 17:08:34.130023 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rmwfs\" (UniqueName: \"kubernetes.io/projected/e436f3eb-958b-4b84-8e18-a2764bfaa3a5-kube-api-access-rmwfs\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71347bcd\" (UID: \"e436f3eb-958b-4b84-8e18-a2764bfaa3a5\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71347bcd"
Jan 26 17:08:34 crc kubenswrapper[4865]: I0126 17:08:34.130812 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e436f3eb-958b-4b84-8e18-a2764bfaa3a5-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71347bcd\" (UID: \"e436f3eb-958b-4b84-8e18-a2764bfaa3a5\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71347bcd"
Jan 26 17:08:34 crc kubenswrapper[4865]: I0126 17:08:34.130914 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e436f3eb-958b-4b84-8e18-a2764bfaa3a5-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71347bcd\" (UID: \"e436f3eb-958b-4b84-8e18-a2764bfaa3a5\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71347bcd"
Jan 26 17:08:34 crc kubenswrapper[4865]: I0126 17:08:34.155461 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rmwfs\" (UniqueName: \"kubernetes.io/projected/e436f3eb-958b-4b84-8e18-a2764bfaa3a5-kube-api-access-rmwfs\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71347bcd\" (UID: \"e436f3eb-958b-4b84-8e18-a2764bfaa3a5\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71347bcd"
Jan 26 17:08:34 crc kubenswrapper[4865]: I0126 17:08:34.292617 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Jan 26 17:08:34 crc kubenswrapper[4865]: I0126 17:08:34.300467 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71347bcd"
Jan 26 17:08:34 crc kubenswrapper[4865]: I0126 17:08:34.506332 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71347bcd"]
Jan 26 17:08:35 crc kubenswrapper[4865]: I0126 17:08:35.257958 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71347bcd" event={"ID":"e436f3eb-958b-4b84-8e18-a2764bfaa3a5","Type":"ContainerStarted","Data":"ef5902a66116049285e02af871b1b5d09ee7efbf23969467b682633b56f02dcc"}
Jan 26 17:08:35 crc kubenswrapper[4865]: I0126 17:08:35.258546 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71347bcd" event={"ID":"e436f3eb-958b-4b84-8e18-a2764bfaa3a5","Type":"ContainerStarted","Data":"e17e5136aafe73a5897da248422868a09d003bd52addac409a8c5a83c1cafd18"}
Jan 26 17:08:35 crc kubenswrapper[4865]: I0126 17:08:35.698303 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-jb7jc"]
Jan 26 17:08:35 crc kubenswrapper[4865]: I0126 17:08:35.699646 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jb7jc"
Jan 26 17:08:35 crc kubenswrapper[4865]: I0126 17:08:35.717140 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jb7jc"]
Jan 26 17:08:35 crc kubenswrapper[4865]: I0126 17:08:35.778279 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/480e659a-feec-47ca-b882-cee0df193df0-utilities\") pod \"redhat-operators-jb7jc\" (UID: \"480e659a-feec-47ca-b882-cee0df193df0\") " pod="openshift-marketplace/redhat-operators-jb7jc"
Jan 26 17:08:35 crc kubenswrapper[4865]: I0126 17:08:35.778425 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dqpzv\" (UniqueName: \"kubernetes.io/projected/480e659a-feec-47ca-b882-cee0df193df0-kube-api-access-dqpzv\") pod \"redhat-operators-jb7jc\" (UID: \"480e659a-feec-47ca-b882-cee0df193df0\") " pod="openshift-marketplace/redhat-operators-jb7jc"
Jan 26 17:08:35 crc kubenswrapper[4865]: I0126 17:08:35.778481 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/480e659a-feec-47ca-b882-cee0df193df0-catalog-content\") pod \"redhat-operators-jb7jc\" (UID: \"480e659a-feec-47ca-b882-cee0df193df0\") " pod="openshift-marketplace/redhat-operators-jb7jc"
Jan 26 17:08:35 crc kubenswrapper[4865]: I0126 17:08:35.880278 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dqpzv\" (UniqueName: \"kubernetes.io/projected/480e659a-feec-47ca-b882-cee0df193df0-kube-api-access-dqpzv\") pod \"redhat-operators-jb7jc\" (UID: \"480e659a-feec-47ca-b882-cee0df193df0\") " pod="openshift-marketplace/redhat-operators-jb7jc"
Jan 26 17:08:35 crc kubenswrapper[4865]: I0126 17:08:35.880372 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/480e659a-feec-47ca-b882-cee0df193df0-catalog-content\") pod \"redhat-operators-jb7jc\" (UID: \"480e659a-feec-47ca-b882-cee0df193df0\") " pod="openshift-marketplace/redhat-operators-jb7jc"
Jan 26 17:08:35 crc kubenswrapper[4865]: I0126 17:08:35.880430 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/480e659a-feec-47ca-b882-cee0df193df0-utilities\") pod \"redhat-operators-jb7jc\" (UID: \"480e659a-feec-47ca-b882-cee0df193df0\") " pod="openshift-marketplace/redhat-operators-jb7jc"
Jan 26 17:08:35 crc kubenswrapper[4865]: I0126 17:08:35.881058 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/480e659a-feec-47ca-b882-cee0df193df0-utilities\") pod \"redhat-operators-jb7jc\" (UID: \"480e659a-feec-47ca-b882-cee0df193df0\") " pod="openshift-marketplace/redhat-operators-jb7jc"
Jan 26 17:08:35 crc kubenswrapper[4865]: I0126 17:08:35.881715 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/480e659a-feec-47ca-b882-cee0df193df0-catalog-content\") pod \"redhat-operators-jb7jc\" (UID: \"480e659a-feec-47ca-b882-cee0df193df0\") " pod="openshift-marketplace/redhat-operators-jb7jc"
Jan 26 17:08:35 crc kubenswrapper[4865]: I0126 17:08:35.908872 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dqpzv\" (UniqueName: \"kubernetes.io/projected/480e659a-feec-47ca-b882-cee0df193df0-kube-api-access-dqpzv\") pod \"redhat-operators-jb7jc\" (UID: \"480e659a-feec-47ca-b882-cee0df193df0\") " pod="openshift-marketplace/redhat-operators-jb7jc"
Jan 26 17:08:36 crc kubenswrapper[4865]: I0126 17:08:36.024298 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jb7jc"
Jan 26 17:08:36 crc kubenswrapper[4865]: I0126 17:08:36.240704 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jb7jc"]
Jan 26 17:08:36 crc kubenswrapper[4865]: I0126 17:08:36.273292 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jb7jc" event={"ID":"480e659a-feec-47ca-b882-cee0df193df0","Type":"ContainerStarted","Data":"8ae169dd5c13409c621150a5d133e81beca289ec536b60ea202f3d7abb7d8548"}
Jan 26 17:08:36 crc kubenswrapper[4865]: I0126 17:08:36.275708 4865 generic.go:334] "Generic (PLEG): container finished" podID="e436f3eb-958b-4b84-8e18-a2764bfaa3a5" containerID="ef5902a66116049285e02af871b1b5d09ee7efbf23969467b682633b56f02dcc" exitCode=0
Jan 26 17:08:36 crc kubenswrapper[4865]: I0126 17:08:36.275757 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71347bcd" event={"ID":"e436f3eb-958b-4b84-8e18-a2764bfaa3a5","Type":"ContainerDied","Data":"ef5902a66116049285e02af871b1b5d09ee7efbf23969467b682633b56f02dcc"}
Jan 26 17:08:36 crc kubenswrapper[4865]: I0126 17:08:36.279741 4865 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Jan 26 17:08:37 crc kubenswrapper[4865]: I0126 17:08:37.285559 4865 generic.go:334] "Generic (PLEG): container finished" podID="480e659a-feec-47ca-b882-cee0df193df0" containerID="857364aa1ea3b96f9fb96c783b38d391c05abfc81580a7cd4ebf27d72146aca2" exitCode=0
Jan 26 17:08:37 crc kubenswrapper[4865]: I0126 17:08:37.285643 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jb7jc" event={"ID":"480e659a-feec-47ca-b882-cee0df193df0","Type":"ContainerDied","Data":"857364aa1ea3b96f9fb96c783b38d391c05abfc81580a7cd4ebf27d72146aca2"}
Jan 26 17:08:40 crc kubenswrapper[4865]: I0126 17:08:40.312395 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jb7jc" event={"ID":"480e659a-feec-47ca-b882-cee0df193df0","Type":"ContainerStarted","Data":"3347148f22e0667651b2de6f3fcc60e7039373607cd926c7be90c05a487ca145"}
Jan 26 17:08:40 crc kubenswrapper[4865]: I0126 17:08:40.315086 4865 generic.go:334] "Generic (PLEG): container finished" podID="e436f3eb-958b-4b84-8e18-a2764bfaa3a5" containerID="35b440910428489ff77d3773256f2d053834e1b9a13a27135bf52d84cfc48bb7" exitCode=0
Jan 26 17:08:40 crc kubenswrapper[4865]: I0126 17:08:40.315161 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71347bcd" event={"ID":"e436f3eb-958b-4b84-8e18-a2764bfaa3a5","Type":"ContainerDied","Data":"35b440910428489ff77d3773256f2d053834e1b9a13a27135bf52d84cfc48bb7"}
Jan 26 17:08:41 crc kubenswrapper[4865]: I0126 17:08:41.325106 4865 generic.go:334] "Generic (PLEG): container finished" podID="e436f3eb-958b-4b84-8e18-a2764bfaa3a5" containerID="2cde305e0778cf68c91a98f11abc50337a09ccb1abc360a1adb0797dd956cc0a" exitCode=0
Jan 26 17:08:41 crc kubenswrapper[4865]: I0126 17:08:41.325305 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71347bcd" event={"ID":"e436f3eb-958b-4b84-8e18-a2764bfaa3a5","Type":"ContainerDied","Data":"2cde305e0778cf68c91a98f11abc50337a09ccb1abc360a1adb0797dd956cc0a"}
Jan 26 17:08:41 crc kubenswrapper[4865]: I0126 17:08:41.327418 4865 generic.go:334] "Generic (PLEG): container finished" podID="480e659a-feec-47ca-b882-cee0df193df0" containerID="3347148f22e0667651b2de6f3fcc60e7039373607cd926c7be90c05a487ca145" exitCode=0
Jan 26 17:08:41 crc kubenswrapper[4865]: I0126 17:08:41.327461 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jb7jc" event={"ID":"480e659a-feec-47ca-b882-cee0df193df0","Type":"ContainerDied","Data":"3347148f22e0667651b2de6f3fcc60e7039373607cd926c7be90c05a487ca145"}
Jan 26 17:08:42 crc kubenswrapper[4865]: I0126 17:08:42.339090 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jb7jc" event={"ID":"480e659a-feec-47ca-b882-cee0df193df0","Type":"ContainerStarted","Data":"e2c7ee5ec1270c7805c846b41a5125d5efc49491a0761f758887e3065ef40407"}
Jan 26 17:08:42 crc kubenswrapper[4865]: I0126 17:08:42.405518 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-jb7jc" podStartSLOduration=2.837192922 podStartE2EDuration="7.40548554s" podCreationTimestamp="2026-01-26 17:08:35 +0000 UTC" firstStartedPulling="2026-01-26 17:08:37.287323357 +0000 UTC m=+844.871208954" lastFinishedPulling="2026-01-26 17:08:41.855615985 +0000 UTC m=+849.439501572" observedRunningTime="2026-01-26 17:08:42.362784396 +0000 UTC m=+849.946669993" watchObservedRunningTime="2026-01-26 17:08:42.40548554 +0000 UTC m=+849.989371127"
Jan 26 17:08:42 crc kubenswrapper[4865]: I0126 17:08:42.406419 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-44x2q"]
Jan 26 17:08:42 crc kubenswrapper[4865]: I0126 17:08:42.407192 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerName="ovn-controller" containerID="cri-o://7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d" gracePeriod=30
Jan 26 17:08:42 crc kubenswrapper[4865]: I0126 17:08:42.407388 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerName="sbdb" containerID="cri-o://22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58" gracePeriod=30
Jan 26 17:08:42 crc kubenswrapper[4865]: I0126 17:08:42.407452 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerName="nbdb" containerID="cri-o://a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538" gracePeriod=30
Jan 26 17:08:42 crc kubenswrapper[4865]: I0126 17:08:42.407496 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerName="northd" containerID="cri-o://7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c" gracePeriod=30
Jan 26 17:08:42 crc kubenswrapper[4865]: I0126 17:08:42.407543 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3" gracePeriod=30
Jan 26 17:08:42 crc kubenswrapper[4865]: I0126 17:08:42.407610 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerName="kube-rbac-proxy-node" containerID="cri-o://d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58" gracePeriod=30
Jan 26 17:08:42 crc kubenswrapper[4865]: I0126 17:08:42.407672 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerName="ovn-acl-logging" containerID="cri-o://80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405" gracePeriod=30
Jan 26 17:08:42 crc kubenswrapper[4865]: I0126 17:08:42.460259 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerName="ovnkube-controller" containerID="cri-o://a3b738c8fde157f3008a270e9bcd4350f331a21a42ab58313b6cbd2a86780793" gracePeriod=30
Jan 26 17:08:42 crc kubenswrapper[4865]: I0126 17:08:42.641686 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71347bcd"
Jan 26 17:08:42 crc kubenswrapper[4865]: I0126 17:08:42.774675 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e436f3eb-958b-4b84-8e18-a2764bfaa3a5-bundle\") pod \"e436f3eb-958b-4b84-8e18-a2764bfaa3a5\" (UID: \"e436f3eb-958b-4b84-8e18-a2764bfaa3a5\") "
Jan 26 17:08:42 crc kubenswrapper[4865]: I0126 17:08:42.774766 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rmwfs\" (UniqueName: \"kubernetes.io/projected/e436f3eb-958b-4b84-8e18-a2764bfaa3a5-kube-api-access-rmwfs\") pod \"e436f3eb-958b-4b84-8e18-a2764bfaa3a5\" (UID: \"e436f3eb-958b-4b84-8e18-a2764bfaa3a5\") "
Jan 26 17:08:42 crc kubenswrapper[4865]: I0126 17:08:42.774881 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e436f3eb-958b-4b84-8e18-a2764bfaa3a5-util\") pod \"e436f3eb-958b-4b84-8e18-a2764bfaa3a5\" (UID: \"e436f3eb-958b-4b84-8e18-a2764bfaa3a5\") "
Jan 26 17:08:42 crc kubenswrapper[4865]: I0126 17:08:42.775544 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e436f3eb-958b-4b84-8e18-a2764bfaa3a5-bundle" (OuterVolumeSpecName: "bundle") pod "e436f3eb-958b-4b84-8e18-a2764bfaa3a5" (UID: "e436f3eb-958b-4b84-8e18-a2764bfaa3a5"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 26 17:08:42 crc kubenswrapper[4865]: I0126 17:08:42.783599 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e436f3eb-958b-4b84-8e18-a2764bfaa3a5-kube-api-access-rmwfs" (OuterVolumeSpecName: "kube-api-access-rmwfs") pod "e436f3eb-958b-4b84-8e18-a2764bfaa3a5" (UID: "e436f3eb-958b-4b84-8e18-a2764bfaa3a5"). InnerVolumeSpecName "kube-api-access-rmwfs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 17:08:42 crc kubenswrapper[4865]: I0126 17:08:42.787522 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e436f3eb-958b-4b84-8e18-a2764bfaa3a5-util" (OuterVolumeSpecName: "util") pod "e436f3eb-958b-4b84-8e18-a2764bfaa3a5" (UID: "e436f3eb-958b-4b84-8e18-a2764bfaa3a5"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 26 17:08:42 crc kubenswrapper[4865]: I0126 17:08:42.876949 4865 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e436f3eb-958b-4b84-8e18-a2764bfaa3a5-bundle\") on node \"crc\" DevicePath \"\""
Jan 26 17:08:42 crc kubenswrapper[4865]: I0126 17:08:42.877021 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rmwfs\" (UniqueName: \"kubernetes.io/projected/e436f3eb-958b-4b84-8e18-a2764bfaa3a5-kube-api-access-rmwfs\") on node \"crc\" DevicePath \"\""
Jan 26 17:08:42 crc kubenswrapper[4865]: I0126 17:08:42.877032 4865 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e436f3eb-958b-4b84-8e18-a2764bfaa3a5-util\") on node \"crc\" DevicePath \"\""
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.160085 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-44x2q_0c0135f6-4074-4aab-9413-a8eb948cd566/ovnkube-controller/3.log"
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.162634 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-44x2q_0c0135f6-4074-4aab-9413-a8eb948cd566/ovn-acl-logging/0.log"
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.163290 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-44x2q_0c0135f6-4074-4aab-9413-a8eb948cd566/ovn-controller/0.log"
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.163709 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q"
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.234745 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-49s4v"]
Jan 26 17:08:43 crc kubenswrapper[4865]: E0126 17:08:43.235030 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerName="ovn-acl-logging"
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.235045 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerName="ovn-acl-logging"
Jan 26 17:08:43 crc kubenswrapper[4865]: E0126 17:08:43.235057 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerName="nbdb"
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.235065 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerName="nbdb"
Jan 26 17:08:43 crc kubenswrapper[4865]: E0126 17:08:43.235073 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e436f3eb-958b-4b84-8e18-a2764bfaa3a5" containerName="util"
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.235080 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="e436f3eb-958b-4b84-8e18-a2764bfaa3a5" containerName="util"
Jan 26 17:08:43 crc kubenswrapper[4865]: E0126 17:08:43.235089 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerName="ovnkube-controller"
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.235097 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerName="ovnkube-controller"
Jan 26 17:08:43 crc kubenswrapper[4865]: E0126 17:08:43.235108 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e436f3eb-958b-4b84-8e18-a2764bfaa3a5" containerName="extract"
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.235115 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="e436f3eb-958b-4b84-8e18-a2764bfaa3a5" containerName="extract"
Jan 26 17:08:43 crc kubenswrapper[4865]: E0126 17:08:43.235128 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerName="ovn-controller"
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.235135 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerName="ovn-controller"
Jan 26 17:08:43 crc kubenswrapper[4865]: E0126 17:08:43.235144 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerName="northd"
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.235151 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerName="northd"
Jan 26 17:08:43 crc kubenswrapper[4865]: E0126 17:08:43.235159 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerName="sbdb"
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.235166 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerName="sbdb"
Jan 26 17:08:43 crc kubenswrapper[4865]: E0126 17:08:43.235175 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerName="kubecfg-setup"
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.235182 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerName="kubecfg-setup"
Jan 26 17:08:43 crc kubenswrapper[4865]: E0126 17:08:43.235195 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerName="ovnkube-controller"
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.235203 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerName="ovnkube-controller"
Jan 26 17:08:43 crc kubenswrapper[4865]: E0126 17:08:43.235214 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerName="kube-rbac-proxy-ovn-metrics"
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.235222 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerName="kube-rbac-proxy-ovn-metrics"
Jan 26 17:08:43 crc kubenswrapper[4865]: E0126 17:08:43.235229 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e436f3eb-958b-4b84-8e18-a2764bfaa3a5" containerName="pull"
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.235236 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="e436f3eb-958b-4b84-8e18-a2764bfaa3a5" containerName="pull"
Jan 26 17:08:43 crc kubenswrapper[4865]: E0126 17:08:43.235248 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerName="ovnkube-controller"
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.235255 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerName="ovnkube-controller"
Jan 26 17:08:43 crc kubenswrapper[4865]: E0126 17:08:43.235266 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerName="ovnkube-controller"
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.235274 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerName="ovnkube-controller"
Jan 26 17:08:43 crc kubenswrapper[4865]: E0126 17:08:43.235285 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerName="kube-rbac-proxy-node"
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.235292 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerName="kube-rbac-proxy-node"
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.235408 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerName="kube-rbac-proxy-ovn-metrics"
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.235431 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerName="kube-rbac-proxy-node"
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.235441 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerName="sbdb"
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.235451 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerName="ovnkube-controller"
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.235460 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerName="ovnkube-controller"
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.235468 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerName="ovn-controller"
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.235477 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerName="nbdb"
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.235487 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="e436f3eb-958b-4b84-8e18-a2764bfaa3a5" containerName="extract"
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.235497 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerName="northd"
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.235508 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerName="ovn-acl-logging"
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.235521 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerName="ovnkube-controller"
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.235529 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerName="ovnkube-controller"
Jan 26 17:08:43 crc kubenswrapper[4865]: E0126 17:08:43.235645 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerName="ovnkube-controller"
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.235655 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerName="ovnkube-controller"
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.235778 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerName="ovnkube-controller"
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.237854 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-49s4v"
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.281725 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0c0135f6-4074-4aab-9413-a8eb948cd566-env-overrides\") pod \"0c0135f6-4074-4aab-9413-a8eb948cd566\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") "
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.281821 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0c0135f6-4074-4aab-9413-a8eb948cd566-ovn-node-metrics-cert\") pod \"0c0135f6-4074-4aab-9413-a8eb948cd566\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") "
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.281849 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-host-cni-bin\") pod \"0c0135f6-4074-4aab-9413-a8eb948cd566\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") "
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.281876 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-host-run-netns\") pod \"0c0135f6-4074-4aab-9413-a8eb948cd566\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") "
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.281924 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-host-run-ovn-kubernetes\") pod \"0c0135f6-4074-4aab-9413-a8eb948cd566\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") "
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.281943 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-run-ovn\") pod \"0c0135f6-4074-4aab-9413-a8eb948cd566\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") "
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.281958 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-node-log\") pod \"0c0135f6-4074-4aab-9413-a8eb948cd566\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") "
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.281981 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-host-kubelet\") pod \"0c0135f6-4074-4aab-9413-a8eb948cd566\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") "
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.281965 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "0c0135f6-4074-4aab-9413-a8eb948cd566" (UID: "0c0135f6-4074-4aab-9413-a8eb948cd566"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.282013 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "0c0135f6-4074-4aab-9413-a8eb948cd566" (UID: "0c0135f6-4074-4aab-9413-a8eb948cd566"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.282016 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-log-socket\") pod \"0c0135f6-4074-4aab-9413-a8eb948cd566\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") "
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.282063 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "0c0135f6-4074-4aab-9413-a8eb948cd566" (UID: "0c0135f6-4074-4aab-9413-a8eb948cd566"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.282036 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "0c0135f6-4074-4aab-9413-a8eb948cd566" (UID: "0c0135f6-4074-4aab-9413-a8eb948cd566"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.282103 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-host-cni-netd\") pod \"0c0135f6-4074-4aab-9413-a8eb948cd566\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") "
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.282072 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-log-socket" (OuterVolumeSpecName: "log-socket") pod "0c0135f6-4074-4aab-9413-a8eb948cd566" (UID: "0c0135f6-4074-4aab-9413-a8eb948cd566"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.282173 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-host-var-lib-cni-networks-ovn-kubernetes\") pod \"0c0135f6-4074-4aab-9413-a8eb948cd566\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") "
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.282110 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "0c0135f6-4074-4aab-9413-a8eb948cd566" (UID: "0c0135f6-4074-4aab-9413-a8eb948cd566"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.282206 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "0c0135f6-4074-4aab-9413-a8eb948cd566" (UID: "0c0135f6-4074-4aab-9413-a8eb948cd566"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.282134 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "0c0135f6-4074-4aab-9413-a8eb948cd566" (UID: "0c0135f6-4074-4aab-9413-a8eb948cd566"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.282139 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-node-log" (OuterVolumeSpecName: "node-log") pod "0c0135f6-4074-4aab-9413-a8eb948cd566" (UID: "0c0135f6-4074-4aab-9413-a8eb948cd566"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.282240 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-host-slash\") pod \"0c0135f6-4074-4aab-9413-a8eb948cd566\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") "
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.282262 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-run-openvswitch\") pod \"0c0135f6-4074-4aab-9413-a8eb948cd566\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") "
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.282284 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-var-lib-openvswitch\") pod \"0c0135f6-4074-4aab-9413-a8eb948cd566\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") "
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.282304 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-etc-openvswitch\") pod \"0c0135f6-4074-4aab-9413-a8eb948cd566\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") "
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.282324 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/0c0135f6-4074-4aab-9413-a8eb948cd566-ovnkube-script-lib\") pod \"0c0135f6-4074-4aab-9413-a8eb948cd566\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") "
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.282344 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-run-systemd\") pod \"0c0135f6-4074-4aab-9413-a8eb948cd566\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") "
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.282340 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "0c0135f6-4074-4aab-9413-a8eb948cd566" (UID: "0c0135f6-4074-4aab-9413-a8eb948cd566"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.282376 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0c0135f6-4074-4aab-9413-a8eb948cd566-ovnkube-config\") pod \"0c0135f6-4074-4aab-9413-a8eb948cd566\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") "
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.282406 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vcbts\" (UniqueName: \"kubernetes.io/projected/0c0135f6-4074-4aab-9413-a8eb948cd566-kube-api-access-vcbts\") pod \"0c0135f6-4074-4aab-9413-a8eb948cd566\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") "
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.282424 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-systemd-units\") pod \"0c0135f6-4074-4aab-9413-a8eb948cd566\" (UID: \"0c0135f6-4074-4aab-9413-a8eb948cd566\") "
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.282353 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-host-slash" (OuterVolumeSpecName: "host-slash") pod "0c0135f6-4074-4aab-9413-a8eb948cd566" (UID: "0c0135f6-4074-4aab-9413-a8eb948cd566"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.282359 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "0c0135f6-4074-4aab-9413-a8eb948cd566" (UID: "0c0135f6-4074-4aab-9413-a8eb948cd566"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.282372 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "0c0135f6-4074-4aab-9413-a8eb948cd566" (UID: "0c0135f6-4074-4aab-9413-a8eb948cd566"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.282562 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "0c0135f6-4074-4aab-9413-a8eb948cd566" (UID: "0c0135f6-4074-4aab-9413-a8eb948cd566"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.282673 4865 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\""
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.282692 4865 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-run-ovn\") on node \"crc\" DevicePath \"\""
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.282703 4865 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-node-log\") on node \"crc\" DevicePath \"\""
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.282714 4865 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-host-kubelet\") on node \"crc\" DevicePath \"\""
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.282724 4865 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-log-socket\") on node \"crc\" DevicePath \"\""
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.282736 4865 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-host-cni-netd\") on node \"crc\" DevicePath \"\""
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.282754 4865 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\""
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.282767 4865 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-host-slash\") on node \"crc\" DevicePath \"\""
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.282779 4865 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-run-openvswitch\") on node \"crc\" DevicePath \"\""
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.282790 4865 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-var-lib-openvswitch\") on node \"crc\" DevicePath \"\""
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.282797 4865 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-etc-openvswitch\") on node \"crc\" DevicePath \"\""
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.282805 4865 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-systemd-units\") on node \"crc\" DevicePath \"\""
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.282814 4865 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-host-cni-bin\") on node \"crc\" DevicePath \"\""
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.282822 4865 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-host-run-netns\") on node \"crc\" DevicePath \"\""
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.282900 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0c0135f6-4074-4aab-9413-a8eb948cd566-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "0c0135f6-4074-4aab-9413-a8eb948cd566" (UID: "0c0135f6-4074-4aab-9413-a8eb948cd566"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.282963 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0c0135f6-4074-4aab-9413-a8eb948cd566-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "0c0135f6-4074-4aab-9413-a8eb948cd566" (UID: "0c0135f6-4074-4aab-9413-a8eb948cd566"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.283180 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0c0135f6-4074-4aab-9413-a8eb948cd566-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "0c0135f6-4074-4aab-9413-a8eb948cd566" (UID: "0c0135f6-4074-4aab-9413-a8eb948cd566"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.292412 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c0135f6-4074-4aab-9413-a8eb948cd566-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "0c0135f6-4074-4aab-9413-a8eb948cd566" (UID: "0c0135f6-4074-4aab-9413-a8eb948cd566"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.298254 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0c0135f6-4074-4aab-9413-a8eb948cd566-kube-api-access-vcbts" (OuterVolumeSpecName: "kube-api-access-vcbts") pod "0c0135f6-4074-4aab-9413-a8eb948cd566" (UID: "0c0135f6-4074-4aab-9413-a8eb948cd566"). InnerVolumeSpecName "kube-api-access-vcbts". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.308339 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "0c0135f6-4074-4aab-9413-a8eb948cd566" (UID: "0c0135f6-4074-4aab-9413-a8eb948cd566"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.343711 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-44x2q_0c0135f6-4074-4aab-9413-a8eb948cd566/ovnkube-controller/3.log"
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.346058 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-44x2q_0c0135f6-4074-4aab-9413-a8eb948cd566/ovn-acl-logging/0.log"
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.346492 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-44x2q_0c0135f6-4074-4aab-9413-a8eb948cd566/ovn-controller/0.log"
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.346813 4865 generic.go:334] "Generic (PLEG): container finished" podID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerID="a3b738c8fde157f3008a270e9bcd4350f331a21a42ab58313b6cbd2a86780793" exitCode=0
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.346848 4865 generic.go:334] "Generic (PLEG): container finished" podID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerID="22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58" exitCode=0
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.346855 4865 generic.go:334] "Generic (PLEG): container finished" podID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerID="a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538" exitCode=0
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.346863 4865 generic.go:334] "Generic (PLEG): container finished" podID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerID="7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c" exitCode=0
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.346873 4865 generic.go:334] "Generic (PLEG): container finished" podID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerID="4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3" exitCode=0
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.346881 4865 generic.go:334] "Generic (PLEG): container finished" podID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerID="d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58" exitCode=0
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.346887 4865 generic.go:334] "Generic (PLEG): container finished" podID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerID="80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405" exitCode=143
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.346866 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" event={"ID":"0c0135f6-4074-4aab-9413-a8eb948cd566","Type":"ContainerDied","Data":"a3b738c8fde157f3008a270e9bcd4350f331a21a42ab58313b6cbd2a86780793"}
Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.346904 4865 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.346941 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" event={"ID":"0c0135f6-4074-4aab-9413-a8eb948cd566","Type":"ContainerDied","Data":"22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.346955 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" event={"ID":"0c0135f6-4074-4aab-9413-a8eb948cd566","Type":"ContainerDied","Data":"a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.346965 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" event={"ID":"0c0135f6-4074-4aab-9413-a8eb948cd566","Type":"ContainerDied","Data":"7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.346976 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" event={"ID":"0c0135f6-4074-4aab-9413-a8eb948cd566","Type":"ContainerDied","Data":"4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.346895 4865 generic.go:334] "Generic (PLEG): container finished" podID="0c0135f6-4074-4aab-9413-a8eb948cd566" containerID="7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d" exitCode=143 Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.347013 4865 scope.go:117] "RemoveContainer" containerID="a3b738c8fde157f3008a270e9bcd4350f331a21a42ab58313b6cbd2a86780793" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.346986 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" event={"ID":"0c0135f6-4074-4aab-9413-a8eb948cd566","Type":"ContainerDied","Data":"d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.347065 4865 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0b1bdaec7b6fb09cece76cbaadc553059d80d03532328d814747fd917b452d7f"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.347077 4865 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.347083 4865 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.347089 4865 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.347094 4865 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.347099 4865 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.347104 4865 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.347109 4865 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.347115 4865 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.347131 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" event={"ID":"0c0135f6-4074-4aab-9413-a8eb948cd566","Type":"ContainerDied","Data":"80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.347144 4865 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a3b738c8fde157f3008a270e9bcd4350f331a21a42ab58313b6cbd2a86780793"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.347153 4865 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0b1bdaec7b6fb09cece76cbaadc553059d80d03532328d814747fd917b452d7f"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.347159 4865 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.347165 4865 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.347170 4865 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.347176 4865 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.347182 4865 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.347187 4865 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.347192 4865 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.347204 4865 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.347212 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" event={"ID":"0c0135f6-4074-4aab-9413-a8eb948cd566","Type":"ContainerDied","Data":"7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.347222 4865 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a3b738c8fde157f3008a270e9bcd4350f331a21a42ab58313b6cbd2a86780793"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.347229 4865 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0b1bdaec7b6fb09cece76cbaadc553059d80d03532328d814747fd917b452d7f"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.347234 4865 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.347239 4865 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.347245 4865 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.347250 4865 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.347255 4865 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.347260 4865 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.347275 4865 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.347285 4865 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.347299 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-44x2q" event={"ID":"0c0135f6-4074-4aab-9413-a8eb948cd566","Type":"ContainerDied","Data":"09ba44bf347db4fbc1971fe48cda28178bc63028f515f3c02a68e691dacb46c0"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.347312 4865 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a3b738c8fde157f3008a270e9bcd4350f331a21a42ab58313b6cbd2a86780793"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.347318 4865 
pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0b1bdaec7b6fb09cece76cbaadc553059d80d03532328d814747fd917b452d7f"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.347324 4865 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.347334 4865 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.347339 4865 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.347344 4865 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.347354 4865 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.347362 4865 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.347372 4865 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.347379 4865 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.349214 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-bz29j_d5c89572-d108-4b35-ab46-dfbbc8b7e3be/kube-multus/2.log" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.349657 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-bz29j_d5c89572-d108-4b35-ab46-dfbbc8b7e3be/kube-multus/1.log" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.349707 4865 generic.go:334] "Generic (PLEG): container finished" podID="d5c89572-d108-4b35-ab46-dfbbc8b7e3be" containerID="3c2a483662a22d2466f77960d662a82ad053f1f5442f5f3d8255e03ba9ca55fa" exitCode=2 Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.349770 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-bz29j" event={"ID":"d5c89572-d108-4b35-ab46-dfbbc8b7e3be","Type":"ContainerDied","Data":"3c2a483662a22d2466f77960d662a82ad053f1f5442f5f3d8255e03ba9ca55fa"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.349801 4865 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d80fdc9f189133778add9127ee70dbdd4ca23ed09cf047218ed4ce9e4b07ce36"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.350335 4865 scope.go:117] "RemoveContainer" 
containerID="3c2a483662a22d2466f77960d662a82ad053f1f5442f5f3d8255e03ba9ca55fa" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.352099 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71347bcd" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.352105 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec71347bcd" event={"ID":"e436f3eb-958b-4b84-8e18-a2764bfaa3a5","Type":"ContainerDied","Data":"e17e5136aafe73a5897da248422868a09d003bd52addac409a8c5a83c1cafd18"} Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.352620 4865 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e17e5136aafe73a5897da248422868a09d003bd52addac409a8c5a83c1cafd18" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.376848 4865 scope.go:117] "RemoveContainer" containerID="0b1bdaec7b6fb09cece76cbaadc553059d80d03532328d814747fd917b452d7f" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.383742 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d53e0926-3c28-4c52-97a2-4f0d0763e16e-var-lib-openvswitch\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.383785 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/d53e0926-3c28-4c52-97a2-4f0d0763e16e-systemd-units\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.383807 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d53e0926-3c28-4c52-97a2-4f0d0763e16e-host-slash\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.383831 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d53e0926-3c28-4c52-97a2-4f0d0763e16e-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.383925 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d53e0926-3c28-4c52-97a2-4f0d0763e16e-run-openvswitch\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.383949 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d53e0926-3c28-4c52-97a2-4f0d0763e16e-ovnkube-config\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 
17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.384021 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d53e0926-3c28-4c52-97a2-4f0d0763e16e-host-cni-bin\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.384039 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/d53e0926-3c28-4c52-97a2-4f0d0763e16e-host-cni-netd\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.384208 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d53e0926-3c28-4c52-97a2-4f0d0763e16e-env-overrides\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.384302 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/d53e0926-3c28-4c52-97a2-4f0d0763e16e-run-ovn\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.384325 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/d53e0926-3c28-4c52-97a2-4f0d0763e16e-log-socket\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.384391 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/d53e0926-3c28-4c52-97a2-4f0d0763e16e-host-kubelet\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.384459 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-445lt\" (UniqueName: \"kubernetes.io/projected/d53e0926-3c28-4c52-97a2-4f0d0763e16e-kube-api-access-445lt\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.384497 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d53e0926-3c28-4c52-97a2-4f0d0763e16e-host-run-netns\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.384523 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d53e0926-3c28-4c52-97a2-4f0d0763e16e-etc-openvswitch\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.384583 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/d53e0926-3c28-4c52-97a2-4f0d0763e16e-ovnkube-script-lib\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.384629 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d53e0926-3c28-4c52-97a2-4f0d0763e16e-host-run-ovn-kubernetes\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.384660 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d53e0926-3c28-4c52-97a2-4f0d0763e16e-ovn-node-metrics-cert\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.384691 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/d53e0926-3c28-4c52-97a2-4f0d0763e16e-run-systemd\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.384724 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/d53e0926-3c28-4c52-97a2-4f0d0763e16e-node-log\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.384820 4865 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/0c0135f6-4074-4aab-9413-a8eb948cd566-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.384874 4865 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/0c0135f6-4074-4aab-9413-a8eb948cd566-run-systemd\") on node \"crc\" DevicePath \"\"" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.384890 4865 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0c0135f6-4074-4aab-9413-a8eb948cd566-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.384903 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vcbts\" (UniqueName: \"kubernetes.io/projected/0c0135f6-4074-4aab-9413-a8eb948cd566-kube-api-access-vcbts\") on node \"crc\" DevicePath \"\"" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.384919 4865 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0c0135f6-4074-4aab-9413-a8eb948cd566-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.384930 4865 reconciler_common.go:293] "Volume detached for 
volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0c0135f6-4074-4aab-9413-a8eb948cd566-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.410188 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-44x2q"] Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.414193 4865 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-44x2q"] Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.417330 4865 scope.go:117] "RemoveContainer" containerID="22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.437891 4865 scope.go:117] "RemoveContainer" containerID="a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.454197 4865 scope.go:117] "RemoveContainer" containerID="7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.467280 4865 scope.go:117] "RemoveContainer" containerID="4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.481378 4865 scope.go:117] "RemoveContainer" containerID="d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.485865 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/d53e0926-3c28-4c52-97a2-4f0d0763e16e-host-kubelet\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.485916 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-445lt\" (UniqueName: \"kubernetes.io/projected/d53e0926-3c28-4c52-97a2-4f0d0763e16e-kube-api-access-445lt\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.485944 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d53e0926-3c28-4c52-97a2-4f0d0763e16e-host-run-netns\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.485960 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d53e0926-3c28-4c52-97a2-4f0d0763e16e-etc-openvswitch\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.485983 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/d53e0926-3c28-4c52-97a2-4f0d0763e16e-ovnkube-script-lib\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.486052 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/d53e0926-3c28-4c52-97a2-4f0d0763e16e-host-run-ovn-kubernetes\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.486080 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d53e0926-3c28-4c52-97a2-4f0d0763e16e-ovn-node-metrics-cert\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.486106 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/d53e0926-3c28-4c52-97a2-4f0d0763e16e-run-systemd\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.486121 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/d53e0926-3c28-4c52-97a2-4f0d0763e16e-node-log\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.486141 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/d53e0926-3c28-4c52-97a2-4f0d0763e16e-systemd-units\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.486158 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d53e0926-3c28-4c52-97a2-4f0d0763e16e-var-lib-openvswitch\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.486182 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d53e0926-3c28-4c52-97a2-4f0d0763e16e-host-slash\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.486207 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d53e0926-3c28-4c52-97a2-4f0d0763e16e-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.486236 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d53e0926-3c28-4c52-97a2-4f0d0763e16e-run-openvswitch\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.486257 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: 
\"kubernetes.io/configmap/d53e0926-3c28-4c52-97a2-4f0d0763e16e-ovnkube-config\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.486274 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d53e0926-3c28-4c52-97a2-4f0d0763e16e-host-cni-bin\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.486293 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/d53e0926-3c28-4c52-97a2-4f0d0763e16e-host-cni-netd\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.486317 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d53e0926-3c28-4c52-97a2-4f0d0763e16e-env-overrides\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.486377 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/d53e0926-3c28-4c52-97a2-4f0d0763e16e-run-ovn\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.486776 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d53e0926-3c28-4c52-97a2-4f0d0763e16e-etc-openvswitch\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.487107 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/d53e0926-3c28-4c52-97a2-4f0d0763e16e-node-log\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.487922 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d53e0926-3c28-4c52-97a2-4f0d0763e16e-host-cni-bin\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.488060 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/d53e0926-3c28-4c52-97a2-4f0d0763e16e-systemd-units\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.488103 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d53e0926-3c28-4c52-97a2-4f0d0763e16e-host-slash\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.488143 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d53e0926-3c28-4c52-97a2-4f0d0763e16e-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.488155 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d53e0926-3c28-4c52-97a2-4f0d0763e16e-var-lib-openvswitch\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.488327 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d53e0926-3c28-4c52-97a2-4f0d0763e16e-run-openvswitch\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.488662 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/d53e0926-3c28-4c52-97a2-4f0d0763e16e-run-ovn\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.488675 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d53e0926-3c28-4c52-97a2-4f0d0763e16e-host-run-ovn-kubernetes\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.488823 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/d53e0926-3c28-4c52-97a2-4f0d0763e16e-host-cni-netd\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.488901 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/d53e0926-3c28-4c52-97a2-4f0d0763e16e-run-systemd\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.488949 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/d53e0926-3c28-4c52-97a2-4f0d0763e16e-host-kubelet\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.489021 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d53e0926-3c28-4c52-97a2-4f0d0763e16e-host-run-netns\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.489039 
4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d53e0926-3c28-4c52-97a2-4f0d0763e16e-ovnkube-config\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.489142 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/d53e0926-3c28-4c52-97a2-4f0d0763e16e-log-socket\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.489612 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/d53e0926-3c28-4c52-97a2-4f0d0763e16e-ovnkube-script-lib\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.486402 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/d53e0926-3c28-4c52-97a2-4f0d0763e16e-log-socket\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.489884 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d53e0926-3c28-4c52-97a2-4f0d0763e16e-env-overrides\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.496803 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d53e0926-3c28-4c52-97a2-4f0d0763e16e-ovn-node-metrics-cert\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.507087 4865 scope.go:117] "RemoveContainer" containerID="80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.510812 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-445lt\" (UniqueName: \"kubernetes.io/projected/d53e0926-3c28-4c52-97a2-4f0d0763e16e-kube-api-access-445lt\") pod \"ovnkube-node-49s4v\" (UID: \"d53e0926-3c28-4c52-97a2-4f0d0763e16e\") " pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.521145 4865 scope.go:117] "RemoveContainer" containerID="7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.535117 4865 scope.go:117] "RemoveContainer" containerID="81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.551657 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.552785 4865 scope.go:117] "RemoveContainer" containerID="a3b738c8fde157f3008a270e9bcd4350f331a21a42ab58313b6cbd2a86780793" Jan 26 17:08:43 crc kubenswrapper[4865]: E0126 17:08:43.553356 4865 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a3b738c8fde157f3008a270e9bcd4350f331a21a42ab58313b6cbd2a86780793\": container with ID starting with a3b738c8fde157f3008a270e9bcd4350f331a21a42ab58313b6cbd2a86780793 not found: ID does not exist" containerID="a3b738c8fde157f3008a270e9bcd4350f331a21a42ab58313b6cbd2a86780793" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.553391 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3b738c8fde157f3008a270e9bcd4350f331a21a42ab58313b6cbd2a86780793"} err="failed to get container status \"a3b738c8fde157f3008a270e9bcd4350f331a21a42ab58313b6cbd2a86780793\": rpc error: code = NotFound desc = could not find container \"a3b738c8fde157f3008a270e9bcd4350f331a21a42ab58313b6cbd2a86780793\": container with ID starting with a3b738c8fde157f3008a270e9bcd4350f331a21a42ab58313b6cbd2a86780793 not found: ID does not exist" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.553414 4865 scope.go:117] "RemoveContainer" containerID="0b1bdaec7b6fb09cece76cbaadc553059d80d03532328d814747fd917b452d7f" Jan 26 17:08:43 crc kubenswrapper[4865]: E0126 17:08:43.553808 4865 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b1bdaec7b6fb09cece76cbaadc553059d80d03532328d814747fd917b452d7f\": container with ID starting with 0b1bdaec7b6fb09cece76cbaadc553059d80d03532328d814747fd917b452d7f not found: ID does not exist" containerID="0b1bdaec7b6fb09cece76cbaadc553059d80d03532328d814747fd917b452d7f" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.553865 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b1bdaec7b6fb09cece76cbaadc553059d80d03532328d814747fd917b452d7f"} err="failed to get container status \"0b1bdaec7b6fb09cece76cbaadc553059d80d03532328d814747fd917b452d7f\": rpc error: code = NotFound desc = could not find container \"0b1bdaec7b6fb09cece76cbaadc553059d80d03532328d814747fd917b452d7f\": container with ID starting with 0b1bdaec7b6fb09cece76cbaadc553059d80d03532328d814747fd917b452d7f not found: ID does not exist" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.553905 4865 scope.go:117] "RemoveContainer" containerID="22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58" Jan 26 17:08:43 crc kubenswrapper[4865]: E0126 17:08:43.554259 4865 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58\": container with ID starting with 22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58 not found: ID does not exist" containerID="22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.554286 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58"} err="failed to get container status \"22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58\": rpc error: code = 
NotFound desc = could not find container \"22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58\": container with ID starting with 22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58 not found: ID does not exist" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.554302 4865 scope.go:117] "RemoveContainer" containerID="a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538" Jan 26 17:08:43 crc kubenswrapper[4865]: E0126 17:08:43.554559 4865 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538\": container with ID starting with a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538 not found: ID does not exist" containerID="a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.554612 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538"} err="failed to get container status \"a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538\": rpc error: code = NotFound desc = could not find container \"a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538\": container with ID starting with a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538 not found: ID does not exist" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.554642 4865 scope.go:117] "RemoveContainer" containerID="7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c" Jan 26 17:08:43 crc kubenswrapper[4865]: E0126 17:08:43.554974 4865 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c\": container with ID starting with 7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c not found: ID does not exist" containerID="7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.555038 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c"} err="failed to get container status \"7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c\": rpc error: code = NotFound desc = could not find container \"7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c\": container with ID starting with 7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c not found: ID does not exist" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.555058 4865 scope.go:117] "RemoveContainer" containerID="4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3" Jan 26 17:08:43 crc kubenswrapper[4865]: E0126 17:08:43.555331 4865 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3\": container with ID starting with 4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3 not found: ID does not exist" containerID="4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.555354 4865 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3"} err="failed to get container status \"4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3\": rpc error: code = NotFound desc = could not find container \"4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3\": container with ID starting with 4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3 not found: ID does not exist" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.555369 4865 scope.go:117] "RemoveContainer" containerID="d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58" Jan 26 17:08:43 crc kubenswrapper[4865]: E0126 17:08:43.555676 4865 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58\": container with ID starting with d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58 not found: ID does not exist" containerID="d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.555711 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58"} err="failed to get container status \"d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58\": rpc error: code = NotFound desc = could not find container \"d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58\": container with ID starting with d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58 not found: ID does not exist" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.555733 4865 scope.go:117] "RemoveContainer" containerID="80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405" Jan 26 17:08:43 crc kubenswrapper[4865]: E0126 17:08:43.556049 4865 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405\": container with ID starting with 80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405 not found: ID does not exist" containerID="80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.556082 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405"} err="failed to get container status \"80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405\": rpc error: code = NotFound desc = could not find container \"80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405\": container with ID starting with 80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405 not found: ID does not exist" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.556102 4865 scope.go:117] "RemoveContainer" containerID="7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d" Jan 26 17:08:43 crc kubenswrapper[4865]: E0126 17:08:43.556380 4865 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d\": container with ID starting with 7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d not found: ID does not exist" 
containerID="7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.556403 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d"} err="failed to get container status \"7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d\": rpc error: code = NotFound desc = could not find container \"7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d\": container with ID starting with 7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d not found: ID does not exist" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.556416 4865 scope.go:117] "RemoveContainer" containerID="81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518" Jan 26 17:08:43 crc kubenswrapper[4865]: E0126 17:08:43.556651 4865 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\": container with ID starting with 81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518 not found: ID does not exist" containerID="81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.556685 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518"} err="failed to get container status \"81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\": rpc error: code = NotFound desc = could not find container \"81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\": container with ID starting with 81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518 not found: ID does not exist" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.556706 4865 scope.go:117] "RemoveContainer" containerID="a3b738c8fde157f3008a270e9bcd4350f331a21a42ab58313b6cbd2a86780793" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.556938 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3b738c8fde157f3008a270e9bcd4350f331a21a42ab58313b6cbd2a86780793"} err="failed to get container status \"a3b738c8fde157f3008a270e9bcd4350f331a21a42ab58313b6cbd2a86780793\": rpc error: code = NotFound desc = could not find container \"a3b738c8fde157f3008a270e9bcd4350f331a21a42ab58313b6cbd2a86780793\": container with ID starting with a3b738c8fde157f3008a270e9bcd4350f331a21a42ab58313b6cbd2a86780793 not found: ID does not exist" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.556964 4865 scope.go:117] "RemoveContainer" containerID="0b1bdaec7b6fb09cece76cbaadc553059d80d03532328d814747fd917b452d7f" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.557266 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b1bdaec7b6fb09cece76cbaadc553059d80d03532328d814747fd917b452d7f"} err="failed to get container status \"0b1bdaec7b6fb09cece76cbaadc553059d80d03532328d814747fd917b452d7f\": rpc error: code = NotFound desc = could not find container \"0b1bdaec7b6fb09cece76cbaadc553059d80d03532328d814747fd917b452d7f\": container with ID starting with 0b1bdaec7b6fb09cece76cbaadc553059d80d03532328d814747fd917b452d7f not found: ID does not exist" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.557293 4865 scope.go:117] "RemoveContainer" 
containerID="22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.557572 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58"} err="failed to get container status \"22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58\": rpc error: code = NotFound desc = could not find container \"22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58\": container with ID starting with 22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58 not found: ID does not exist" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.557600 4865 scope.go:117] "RemoveContainer" containerID="a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.557887 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538"} err="failed to get container status \"a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538\": rpc error: code = NotFound desc = could not find container \"a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538\": container with ID starting with a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538 not found: ID does not exist" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.557907 4865 scope.go:117] "RemoveContainer" containerID="7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.558217 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c"} err="failed to get container status \"7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c\": rpc error: code = NotFound desc = could not find container \"7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c\": container with ID starting with 7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c not found: ID does not exist" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.558245 4865 scope.go:117] "RemoveContainer" containerID="4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.558501 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3"} err="failed to get container status \"4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3\": rpc error: code = NotFound desc = could not find container \"4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3\": container with ID starting with 4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3 not found: ID does not exist" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.558527 4865 scope.go:117] "RemoveContainer" containerID="d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.558791 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58"} err="failed to get container status \"d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58\": rpc error: code = NotFound desc = could not find 
container \"d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58\": container with ID starting with d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58 not found: ID does not exist" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.558824 4865 scope.go:117] "RemoveContainer" containerID="80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.559159 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405"} err="failed to get container status \"80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405\": rpc error: code = NotFound desc = could not find container \"80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405\": container with ID starting with 80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405 not found: ID does not exist" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.559187 4865 scope.go:117] "RemoveContainer" containerID="7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.559521 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d"} err="failed to get container status \"7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d\": rpc error: code = NotFound desc = could not find container \"7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d\": container with ID starting with 7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d not found: ID does not exist" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.559540 4865 scope.go:117] "RemoveContainer" containerID="81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.559896 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518"} err="failed to get container status \"81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\": rpc error: code = NotFound desc = could not find container \"81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\": container with ID starting with 81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518 not found: ID does not exist" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.559927 4865 scope.go:117] "RemoveContainer" containerID="a3b738c8fde157f3008a270e9bcd4350f331a21a42ab58313b6cbd2a86780793" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.560138 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3b738c8fde157f3008a270e9bcd4350f331a21a42ab58313b6cbd2a86780793"} err="failed to get container status \"a3b738c8fde157f3008a270e9bcd4350f331a21a42ab58313b6cbd2a86780793\": rpc error: code = NotFound desc = could not find container \"a3b738c8fde157f3008a270e9bcd4350f331a21a42ab58313b6cbd2a86780793\": container with ID starting with a3b738c8fde157f3008a270e9bcd4350f331a21a42ab58313b6cbd2a86780793 not found: ID does not exist" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.560169 4865 scope.go:117] "RemoveContainer" containerID="0b1bdaec7b6fb09cece76cbaadc553059d80d03532328d814747fd917b452d7f" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.567254 4865 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b1bdaec7b6fb09cece76cbaadc553059d80d03532328d814747fd917b452d7f"} err="failed to get container status \"0b1bdaec7b6fb09cece76cbaadc553059d80d03532328d814747fd917b452d7f\": rpc error: code = NotFound desc = could not find container \"0b1bdaec7b6fb09cece76cbaadc553059d80d03532328d814747fd917b452d7f\": container with ID starting with 0b1bdaec7b6fb09cece76cbaadc553059d80d03532328d814747fd917b452d7f not found: ID does not exist" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.567287 4865 scope.go:117] "RemoveContainer" containerID="22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.567655 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58"} err="failed to get container status \"22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58\": rpc error: code = NotFound desc = could not find container \"22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58\": container with ID starting with 22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58 not found: ID does not exist" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.567695 4865 scope.go:117] "RemoveContainer" containerID="a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.567913 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538"} err="failed to get container status \"a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538\": rpc error: code = NotFound desc = could not find container \"a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538\": container with ID starting with a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538 not found: ID does not exist" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.567932 4865 scope.go:117] "RemoveContainer" containerID="7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.568111 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c"} err="failed to get container status \"7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c\": rpc error: code = NotFound desc = could not find container \"7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c\": container with ID starting with 7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c not found: ID does not exist" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.568128 4865 scope.go:117] "RemoveContainer" containerID="4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.568316 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3"} err="failed to get container status \"4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3\": rpc error: code = NotFound desc = could not find container \"4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3\": container with ID starting with 
4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3 not found: ID does not exist" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.568334 4865 scope.go:117] "RemoveContainer" containerID="d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.568494 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58"} err="failed to get container status \"d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58\": rpc error: code = NotFound desc = could not find container \"d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58\": container with ID starting with d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58 not found: ID does not exist" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.568510 4865 scope.go:117] "RemoveContainer" containerID="80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.568653 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405"} err="failed to get container status \"80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405\": rpc error: code = NotFound desc = could not find container \"80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405\": container with ID starting with 80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405 not found: ID does not exist" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.568667 4865 scope.go:117] "RemoveContainer" containerID="7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.568807 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d"} err="failed to get container status \"7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d\": rpc error: code = NotFound desc = could not find container \"7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d\": container with ID starting with 7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d not found: ID does not exist" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.568821 4865 scope.go:117] "RemoveContainer" containerID="81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.568957 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518"} err="failed to get container status \"81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\": rpc error: code = NotFound desc = could not find container \"81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\": container with ID starting with 81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518 not found: ID does not exist" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.568971 4865 scope.go:117] "RemoveContainer" containerID="a3b738c8fde157f3008a270e9bcd4350f331a21a42ab58313b6cbd2a86780793" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.569181 4865 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"a3b738c8fde157f3008a270e9bcd4350f331a21a42ab58313b6cbd2a86780793"} err="failed to get container status \"a3b738c8fde157f3008a270e9bcd4350f331a21a42ab58313b6cbd2a86780793\": rpc error: code = NotFound desc = could not find container \"a3b738c8fde157f3008a270e9bcd4350f331a21a42ab58313b6cbd2a86780793\": container with ID starting with a3b738c8fde157f3008a270e9bcd4350f331a21a42ab58313b6cbd2a86780793 not found: ID does not exist" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.569198 4865 scope.go:117] "RemoveContainer" containerID="0b1bdaec7b6fb09cece76cbaadc553059d80d03532328d814747fd917b452d7f" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.569364 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b1bdaec7b6fb09cece76cbaadc553059d80d03532328d814747fd917b452d7f"} err="failed to get container status \"0b1bdaec7b6fb09cece76cbaadc553059d80d03532328d814747fd917b452d7f\": rpc error: code = NotFound desc = could not find container \"0b1bdaec7b6fb09cece76cbaadc553059d80d03532328d814747fd917b452d7f\": container with ID starting with 0b1bdaec7b6fb09cece76cbaadc553059d80d03532328d814747fd917b452d7f not found: ID does not exist" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.569379 4865 scope.go:117] "RemoveContainer" containerID="22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.569540 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58"} err="failed to get container status \"22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58\": rpc error: code = NotFound desc = could not find container \"22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58\": container with ID starting with 22e134a11dc6b2dc027106774799756d2fb97369c89f2ee3bd2f9bba2d273b58 not found: ID does not exist" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.569553 4865 scope.go:117] "RemoveContainer" containerID="a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.569708 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538"} err="failed to get container status \"a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538\": rpc error: code = NotFound desc = could not find container \"a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538\": container with ID starting with a3a01268ff6ee5e14441936bf5f5b1acc7bedd334a2eb5e109cf57632f1e5538 not found: ID does not exist" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.569731 4865 scope.go:117] "RemoveContainer" containerID="7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.569891 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c"} err="failed to get container status \"7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c\": rpc error: code = NotFound desc = could not find container \"7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c\": container with ID starting with 7d807bf2e7459187b0245fda3c9ee7586a4cdc73106abd9f100dbb4fab6a6f2c not found: ID does not exist" Jan 
26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.569909 4865 scope.go:117] "RemoveContainer" containerID="4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.570098 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3"} err="failed to get container status \"4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3\": rpc error: code = NotFound desc = could not find container \"4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3\": container with ID starting with 4e02e3d46458fa849d67fc2d563f5eac1c23fe4911052f56fc1ebb151fc3dee3 not found: ID does not exist" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.570118 4865 scope.go:117] "RemoveContainer" containerID="d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.570281 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58"} err="failed to get container status \"d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58\": rpc error: code = NotFound desc = could not find container \"d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58\": container with ID starting with d71ca3432a562c501b200ac0022987fe0e7bea347f335db73b2d69755f040c58 not found: ID does not exist" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.570296 4865 scope.go:117] "RemoveContainer" containerID="80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.570434 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405"} err="failed to get container status \"80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405\": rpc error: code = NotFound desc = could not find container \"80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405\": container with ID starting with 80d553b7bea5cfb8ab424b365fb3d670ac413a71ff2b8bc9038fac7ec0841405 not found: ID does not exist" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.570448 4865 scope.go:117] "RemoveContainer" containerID="7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.570606 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d"} err="failed to get container status \"7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d\": rpc error: code = NotFound desc = could not find container \"7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d\": container with ID starting with 7892f8f3fe8c5bfeffaef55c1f1b6fffeb5942742821182dacf189a94bb1712d not found: ID does not exist" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.570621 4865 scope.go:117] "RemoveContainer" containerID="81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.570840 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518"} err="failed to get container status 
\"81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\": rpc error: code = NotFound desc = could not find container \"81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518\": container with ID starting with 81d24a72bb2520e2eda0b1cce2f4789e0223dc9c7970ffcf8d1e4fae7b52d518 not found: ID does not exist" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.570867 4865 scope.go:117] "RemoveContainer" containerID="a3b738c8fde157f3008a270e9bcd4350f331a21a42ab58313b6cbd2a86780793" Jan 26 17:08:43 crc kubenswrapper[4865]: I0126 17:08:43.571086 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3b738c8fde157f3008a270e9bcd4350f331a21a42ab58313b6cbd2a86780793"} err="failed to get container status \"a3b738c8fde157f3008a270e9bcd4350f331a21a42ab58313b6cbd2a86780793\": rpc error: code = NotFound desc = could not find container \"a3b738c8fde157f3008a270e9bcd4350f331a21a42ab58313b6cbd2a86780793\": container with ID starting with a3b738c8fde157f3008a270e9bcd4350f331a21a42ab58313b6cbd2a86780793 not found: ID does not exist" Jan 26 17:08:44 crc kubenswrapper[4865]: I0126 17:08:44.359178 4865 generic.go:334] "Generic (PLEG): container finished" podID="d53e0926-3c28-4c52-97a2-4f0d0763e16e" containerID="167dceb9f111da776b514824c0d79ffe1a32e5e58e508dec90b3244ddf402d71" exitCode=0 Jan 26 17:08:44 crc kubenswrapper[4865]: I0126 17:08:44.363655 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-bz29j_d5c89572-d108-4b35-ab46-dfbbc8b7e3be/kube-multus/2.log" Jan 26 17:08:44 crc kubenswrapper[4865]: I0126 17:08:44.364173 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-bz29j_d5c89572-d108-4b35-ab46-dfbbc8b7e3be/kube-multus/1.log" Jan 26 17:08:44 crc kubenswrapper[4865]: I0126 17:08:44.369293 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0c0135f6-4074-4aab-9413-a8eb948cd566" path="/var/lib/kubelet/pods/0c0135f6-4074-4aab-9413-a8eb948cd566/volumes" Jan 26 17:08:44 crc kubenswrapper[4865]: I0126 17:08:44.370465 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" event={"ID":"d53e0926-3c28-4c52-97a2-4f0d0763e16e","Type":"ContainerDied","Data":"167dceb9f111da776b514824c0d79ffe1a32e5e58e508dec90b3244ddf402d71"} Jan 26 17:08:44 crc kubenswrapper[4865]: I0126 17:08:44.370504 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" event={"ID":"d53e0926-3c28-4c52-97a2-4f0d0763e16e","Type":"ContainerStarted","Data":"7a2941d8020dfeb5ced95971e9f8c130108390334d77df5671296dbaf9b8ef90"} Jan 26 17:08:44 crc kubenswrapper[4865]: I0126 17:08:44.370515 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-bz29j" event={"ID":"d5c89572-d108-4b35-ab46-dfbbc8b7e3be","Type":"ContainerStarted","Data":"475085b330854824e5602d57dd644d87c6fc130659f19f555409453dcc10893d"} Jan 26 17:08:45 crc kubenswrapper[4865]: I0126 17:08:45.003841 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-s72p2"] Jan 26 17:08:45 crc kubenswrapper[4865]: I0126 17:08:45.005195 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-s72p2" Jan 26 17:08:45 crc kubenswrapper[4865]: I0126 17:08:45.006806 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Jan 26 17:08:45 crc kubenswrapper[4865]: I0126 17:08:45.007894 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-rlx4s" Jan 26 17:08:45 crc kubenswrapper[4865]: I0126 17:08:45.008402 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Jan 26 17:08:45 crc kubenswrapper[4865]: I0126 17:08:45.116407 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6s5kl\" (UniqueName: \"kubernetes.io/projected/9582195a-21dd-49a2-9f9b-974c3dfcf9e2-kube-api-access-6s5kl\") pod \"nmstate-operator-646758c888-s72p2\" (UID: \"9582195a-21dd-49a2-9f9b-974c3dfcf9e2\") " pod="openshift-nmstate/nmstate-operator-646758c888-s72p2" Jan 26 17:08:45 crc kubenswrapper[4865]: I0126 17:08:45.218740 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6s5kl\" (UniqueName: \"kubernetes.io/projected/9582195a-21dd-49a2-9f9b-974c3dfcf9e2-kube-api-access-6s5kl\") pod \"nmstate-operator-646758c888-s72p2\" (UID: \"9582195a-21dd-49a2-9f9b-974c3dfcf9e2\") " pod="openshift-nmstate/nmstate-operator-646758c888-s72p2" Jan 26 17:08:45 crc kubenswrapper[4865]: I0126 17:08:45.260311 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6s5kl\" (UniqueName: \"kubernetes.io/projected/9582195a-21dd-49a2-9f9b-974c3dfcf9e2-kube-api-access-6s5kl\") pod \"nmstate-operator-646758c888-s72p2\" (UID: \"9582195a-21dd-49a2-9f9b-974c3dfcf9e2\") " pod="openshift-nmstate/nmstate-operator-646758c888-s72p2" Jan 26 17:08:45 crc kubenswrapper[4865]: I0126 17:08:45.319178 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-s72p2" Jan 26 17:08:45 crc kubenswrapper[4865]: E0126 17:08:45.358177 4865 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-operator-646758c888-s72p2_openshift-nmstate_9582195a-21dd-49a2-9f9b-974c3dfcf9e2_0(308fb44452bb7e8639f65f95c17be08ae5639b48da6ae7f4ec0df637a1344b93): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 26 17:08:45 crc kubenswrapper[4865]: E0126 17:08:45.358261 4865 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-operator-646758c888-s72p2_openshift-nmstate_9582195a-21dd-49a2-9f9b-974c3dfcf9e2_0(308fb44452bb7e8639f65f95c17be08ae5639b48da6ae7f4ec0df637a1344b93): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-nmstate/nmstate-operator-646758c888-s72p2" Jan 26 17:08:45 crc kubenswrapper[4865]: E0126 17:08:45.358301 4865 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-operator-646758c888-s72p2_openshift-nmstate_9582195a-21dd-49a2-9f9b-974c3dfcf9e2_0(308fb44452bb7e8639f65f95c17be08ae5639b48da6ae7f4ec0df637a1344b93): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-nmstate/nmstate-operator-646758c888-s72p2" Jan 26 17:08:45 crc kubenswrapper[4865]: E0126 17:08:45.358360 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"nmstate-operator-646758c888-s72p2_openshift-nmstate(9582195a-21dd-49a2-9f9b-974c3dfcf9e2)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"nmstate-operator-646758c888-s72p2_openshift-nmstate(9582195a-21dd-49a2-9f9b-974c3dfcf9e2)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-operator-646758c888-s72p2_openshift-nmstate_9582195a-21dd-49a2-9f9b-974c3dfcf9e2_0(308fb44452bb7e8639f65f95c17be08ae5639b48da6ae7f4ec0df637a1344b93): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-nmstate/nmstate-operator-646758c888-s72p2" podUID="9582195a-21dd-49a2-9f9b-974c3dfcf9e2" Jan 26 17:08:45 crc kubenswrapper[4865]: I0126 17:08:45.374631 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" event={"ID":"d53e0926-3c28-4c52-97a2-4f0d0763e16e","Type":"ContainerStarted","Data":"0de8485f215f47d031cd13e78c6f8448910507cb21fecc1cb69fc0a1633b7062"} Jan 26 17:08:45 crc kubenswrapper[4865]: I0126 17:08:45.374680 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" event={"ID":"d53e0926-3c28-4c52-97a2-4f0d0763e16e","Type":"ContainerStarted","Data":"283fb7dfc3f4bc2f7052da1ef9541ad80494de222ea034b1d8cf41cc9607490e"} Jan 26 17:08:45 crc kubenswrapper[4865]: I0126 17:08:45.374692 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" event={"ID":"d53e0926-3c28-4c52-97a2-4f0d0763e16e","Type":"ContainerStarted","Data":"98ebeb1f4d90c28b8c34ad930b9f6917a875d263a846f082715e24e5fd074e71"} Jan 26 17:08:45 crc kubenswrapper[4865]: I0126 17:08:45.374703 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" event={"ID":"d53e0926-3c28-4c52-97a2-4f0d0763e16e","Type":"ContainerStarted","Data":"43ec1afc8b42ea948d0c98b26477bcfe57b42e6264c27e40ddfb98a01a40e0b0"} Jan 26 17:08:46 crc kubenswrapper[4865]: I0126 17:08:46.025266 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-jb7jc" Jan 26 17:08:46 crc kubenswrapper[4865]: I0126 17:08:46.027072 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-jb7jc" Jan 26 17:08:47 crc kubenswrapper[4865]: I0126 17:08:47.071139 4865 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-jb7jc" podUID="480e659a-feec-47ca-b882-cee0df193df0" containerName="registry-server" probeResult="failure" output=< Jan 26 17:08:47 crc kubenswrapper[4865]: timeout: failed to connect service ":50051" within 1s Jan 26 17:08:47 crc kubenswrapper[4865]: > Jan 26 17:08:47 crc kubenswrapper[4865]: I0126 17:08:47.389889 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" event={"ID":"d53e0926-3c28-4c52-97a2-4f0d0763e16e","Type":"ContainerStarted","Data":"c665014b63b8e77092f62708b4123a459b8d57c9c7df66d479ca6a7923580402"} Jan 26 17:08:47 crc kubenswrapper[4865]: I0126 17:08:47.390305 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" 
event={"ID":"d53e0926-3c28-4c52-97a2-4f0d0763e16e","Type":"ContainerStarted","Data":"099851a2369d9204a8f4f5d187fd86f3322f7c0929cea3fd08a0f3118a73f12c"} Jan 26 17:08:49 crc kubenswrapper[4865]: I0126 17:08:49.406076 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" event={"ID":"d53e0926-3c28-4c52-97a2-4f0d0763e16e","Type":"ContainerStarted","Data":"4e54833e6d39cd8ee80d69842366db98ed4cf17bbff52ae4d8347a158912b91f"} Jan 26 17:08:54 crc kubenswrapper[4865]: I0126 17:08:54.445790 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" event={"ID":"d53e0926-3c28-4c52-97a2-4f0d0763e16e","Type":"ContainerStarted","Data":"9a50b05aecfad4d56e4fbd57c250a3d07f7b4fbaefbf77c75a132e77a8b677ff"} Jan 26 17:08:54 crc kubenswrapper[4865]: I0126 17:08:54.446950 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:54 crc kubenswrapper[4865]: I0126 17:08:54.447051 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:54 crc kubenswrapper[4865]: I0126 17:08:54.447118 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:54 crc kubenswrapper[4865]: I0126 17:08:54.481518 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" podStartSLOduration=11.481493171 podStartE2EDuration="11.481493171s" podCreationTimestamp="2026-01-26 17:08:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 17:08:54.478531427 +0000 UTC m=+862.062417044" watchObservedRunningTime="2026-01-26 17:08:54.481493171 +0000 UTC m=+862.065378758" Jan 26 17:08:54 crc kubenswrapper[4865]: I0126 17:08:54.482926 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:54 crc kubenswrapper[4865]: I0126 17:08:54.485220 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:08:54 crc kubenswrapper[4865]: I0126 17:08:54.577726 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-s72p2"] Jan 26 17:08:54 crc kubenswrapper[4865]: I0126 17:08:54.577854 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-s72p2" Jan 26 17:08:54 crc kubenswrapper[4865]: I0126 17:08:54.578366 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-s72p2" Jan 26 17:08:54 crc kubenswrapper[4865]: E0126 17:08:54.613507 4865 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-operator-646758c888-s72p2_openshift-nmstate_9582195a-21dd-49a2-9f9b-974c3dfcf9e2_0(73dc44cc3fdbf3bdd6365b004e7ff3c9156fb70c50d1ffc7b7edd3b6994ff812): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Jan 26 17:08:54 crc kubenswrapper[4865]: E0126 17:08:54.613616 4865 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-operator-646758c888-s72p2_openshift-nmstate_9582195a-21dd-49a2-9f9b-974c3dfcf9e2_0(73dc44cc3fdbf3bdd6365b004e7ff3c9156fb70c50d1ffc7b7edd3b6994ff812): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-nmstate/nmstate-operator-646758c888-s72p2" Jan 26 17:08:54 crc kubenswrapper[4865]: E0126 17:08:54.613649 4865 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-operator-646758c888-s72p2_openshift-nmstate_9582195a-21dd-49a2-9f9b-974c3dfcf9e2_0(73dc44cc3fdbf3bdd6365b004e7ff3c9156fb70c50d1ffc7b7edd3b6994ff812): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-nmstate/nmstate-operator-646758c888-s72p2" Jan 26 17:08:54 crc kubenswrapper[4865]: E0126 17:08:54.613711 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"nmstate-operator-646758c888-s72p2_openshift-nmstate(9582195a-21dd-49a2-9f9b-974c3dfcf9e2)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"nmstate-operator-646758c888-s72p2_openshift-nmstate(9582195a-21dd-49a2-9f9b-974c3dfcf9e2)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_nmstate-operator-646758c888-s72p2_openshift-nmstate_9582195a-21dd-49a2-9f9b-974c3dfcf9e2_0(73dc44cc3fdbf3bdd6365b004e7ff3c9156fb70c50d1ffc7b7edd3b6994ff812): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-nmstate/nmstate-operator-646758c888-s72p2" podUID="9582195a-21dd-49a2-9f9b-974c3dfcf9e2" Jan 26 17:08:56 crc kubenswrapper[4865]: I0126 17:08:56.070440 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-jb7jc" Jan 26 17:08:56 crc kubenswrapper[4865]: I0126 17:08:56.129041 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-jb7jc" Jan 26 17:08:56 crc kubenswrapper[4865]: I0126 17:08:56.300243 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jb7jc"] Jan 26 17:08:57 crc kubenswrapper[4865]: I0126 17:08:57.462302 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-jb7jc" podUID="480e659a-feec-47ca-b882-cee0df193df0" containerName="registry-server" containerID="cri-o://e2c7ee5ec1270c7805c846b41a5125d5efc49491a0761f758887e3065ef40407" gracePeriod=2 Jan 26 17:08:58 crc kubenswrapper[4865]: I0126 17:08:58.366367 4865 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-jb7jc" Jan 26 17:08:58 crc kubenswrapper[4865]: I0126 17:08:58.470289 4865 generic.go:334] "Generic (PLEG): container finished" podID="480e659a-feec-47ca-b882-cee0df193df0" containerID="e2c7ee5ec1270c7805c846b41a5125d5efc49491a0761f758887e3065ef40407" exitCode=0 Jan 26 17:08:58 crc kubenswrapper[4865]: I0126 17:08:58.470343 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jb7jc" event={"ID":"480e659a-feec-47ca-b882-cee0df193df0","Type":"ContainerDied","Data":"e2c7ee5ec1270c7805c846b41a5125d5efc49491a0761f758887e3065ef40407"} Jan 26 17:08:58 crc kubenswrapper[4865]: I0126 17:08:58.470358 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jb7jc" Jan 26 17:08:58 crc kubenswrapper[4865]: I0126 17:08:58.470383 4865 scope.go:117] "RemoveContainer" containerID="e2c7ee5ec1270c7805c846b41a5125d5efc49491a0761f758887e3065ef40407" Jan 26 17:08:58 crc kubenswrapper[4865]: I0126 17:08:58.470371 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jb7jc" event={"ID":"480e659a-feec-47ca-b882-cee0df193df0","Type":"ContainerDied","Data":"8ae169dd5c13409c621150a5d133e81beca289ec536b60ea202f3d7abb7d8548"} Jan 26 17:08:58 crc kubenswrapper[4865]: I0126 17:08:58.491818 4865 scope.go:117] "RemoveContainer" containerID="3347148f22e0667651b2de6f3fcc60e7039373607cd926c7be90c05a487ca145" Jan 26 17:08:58 crc kubenswrapper[4865]: I0126 17:08:58.505656 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/480e659a-feec-47ca-b882-cee0df193df0-utilities\") pod \"480e659a-feec-47ca-b882-cee0df193df0\" (UID: \"480e659a-feec-47ca-b882-cee0df193df0\") " Jan 26 17:08:58 crc kubenswrapper[4865]: I0126 17:08:58.505816 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dqpzv\" (UniqueName: \"kubernetes.io/projected/480e659a-feec-47ca-b882-cee0df193df0-kube-api-access-dqpzv\") pod \"480e659a-feec-47ca-b882-cee0df193df0\" (UID: \"480e659a-feec-47ca-b882-cee0df193df0\") " Jan 26 17:08:58 crc kubenswrapper[4865]: I0126 17:08:58.505894 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/480e659a-feec-47ca-b882-cee0df193df0-catalog-content\") pod \"480e659a-feec-47ca-b882-cee0df193df0\" (UID: \"480e659a-feec-47ca-b882-cee0df193df0\") " Jan 26 17:08:58 crc kubenswrapper[4865]: I0126 17:08:58.506986 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/480e659a-feec-47ca-b882-cee0df193df0-utilities" (OuterVolumeSpecName: "utilities") pod "480e659a-feec-47ca-b882-cee0df193df0" (UID: "480e659a-feec-47ca-b882-cee0df193df0"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 17:08:58 crc kubenswrapper[4865]: I0126 17:08:58.509612 4865 scope.go:117] "RemoveContainer" containerID="857364aa1ea3b96f9fb96c783b38d391c05abfc81580a7cd4ebf27d72146aca2" Jan 26 17:08:58 crc kubenswrapper[4865]: I0126 17:08:58.513058 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/480e659a-feec-47ca-b882-cee0df193df0-kube-api-access-dqpzv" (OuterVolumeSpecName: "kube-api-access-dqpzv") pod "480e659a-feec-47ca-b882-cee0df193df0" (UID: "480e659a-feec-47ca-b882-cee0df193df0"). InnerVolumeSpecName "kube-api-access-dqpzv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 17:08:58 crc kubenswrapper[4865]: I0126 17:08:58.566923 4865 scope.go:117] "RemoveContainer" containerID="e2c7ee5ec1270c7805c846b41a5125d5efc49491a0761f758887e3065ef40407" Jan 26 17:08:58 crc kubenswrapper[4865]: E0126 17:08:58.567496 4865 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e2c7ee5ec1270c7805c846b41a5125d5efc49491a0761f758887e3065ef40407\": container with ID starting with e2c7ee5ec1270c7805c846b41a5125d5efc49491a0761f758887e3065ef40407 not found: ID does not exist" containerID="e2c7ee5ec1270c7805c846b41a5125d5efc49491a0761f758887e3065ef40407" Jan 26 17:08:58 crc kubenswrapper[4865]: I0126 17:08:58.567551 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e2c7ee5ec1270c7805c846b41a5125d5efc49491a0761f758887e3065ef40407"} err="failed to get container status \"e2c7ee5ec1270c7805c846b41a5125d5efc49491a0761f758887e3065ef40407\": rpc error: code = NotFound desc = could not find container \"e2c7ee5ec1270c7805c846b41a5125d5efc49491a0761f758887e3065ef40407\": container with ID starting with e2c7ee5ec1270c7805c846b41a5125d5efc49491a0761f758887e3065ef40407 not found: ID does not exist" Jan 26 17:08:58 crc kubenswrapper[4865]: I0126 17:08:58.567592 4865 scope.go:117] "RemoveContainer" containerID="3347148f22e0667651b2de6f3fcc60e7039373607cd926c7be90c05a487ca145" Jan 26 17:08:58 crc kubenswrapper[4865]: E0126 17:08:58.567883 4865 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3347148f22e0667651b2de6f3fcc60e7039373607cd926c7be90c05a487ca145\": container with ID starting with 3347148f22e0667651b2de6f3fcc60e7039373607cd926c7be90c05a487ca145 not found: ID does not exist" containerID="3347148f22e0667651b2de6f3fcc60e7039373607cd926c7be90c05a487ca145" Jan 26 17:08:58 crc kubenswrapper[4865]: I0126 17:08:58.567930 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3347148f22e0667651b2de6f3fcc60e7039373607cd926c7be90c05a487ca145"} err="failed to get container status \"3347148f22e0667651b2de6f3fcc60e7039373607cd926c7be90c05a487ca145\": rpc error: code = NotFound desc = could not find container \"3347148f22e0667651b2de6f3fcc60e7039373607cd926c7be90c05a487ca145\": container with ID starting with 3347148f22e0667651b2de6f3fcc60e7039373607cd926c7be90c05a487ca145 not found: ID does not exist" Jan 26 17:08:58 crc kubenswrapper[4865]: I0126 17:08:58.567960 4865 scope.go:117] "RemoveContainer" containerID="857364aa1ea3b96f9fb96c783b38d391c05abfc81580a7cd4ebf27d72146aca2" Jan 26 17:08:58 crc kubenswrapper[4865]: E0126 17:08:58.568607 4865 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"857364aa1ea3b96f9fb96c783b38d391c05abfc81580a7cd4ebf27d72146aca2\": container with ID starting with 857364aa1ea3b96f9fb96c783b38d391c05abfc81580a7cd4ebf27d72146aca2 not found: ID does not exist" containerID="857364aa1ea3b96f9fb96c783b38d391c05abfc81580a7cd4ebf27d72146aca2" Jan 26 17:08:58 crc kubenswrapper[4865]: I0126 17:08:58.568681 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"857364aa1ea3b96f9fb96c783b38d391c05abfc81580a7cd4ebf27d72146aca2"} err="failed to get container status \"857364aa1ea3b96f9fb96c783b38d391c05abfc81580a7cd4ebf27d72146aca2\": rpc error: code = NotFound desc = could not find container \"857364aa1ea3b96f9fb96c783b38d391c05abfc81580a7cd4ebf27d72146aca2\": container with ID starting with 857364aa1ea3b96f9fb96c783b38d391c05abfc81580a7cd4ebf27d72146aca2 not found: ID does not exist" Jan 26 17:08:58 crc kubenswrapper[4865]: I0126 17:08:58.607043 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dqpzv\" (UniqueName: \"kubernetes.io/projected/480e659a-feec-47ca-b882-cee0df193df0-kube-api-access-dqpzv\") on node \"crc\" DevicePath \"\"" Jan 26 17:08:58 crc kubenswrapper[4865]: I0126 17:08:58.607091 4865 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/480e659a-feec-47ca-b882-cee0df193df0-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 17:08:58 crc kubenswrapper[4865]: I0126 17:08:58.622883 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/480e659a-feec-47ca-b882-cee0df193df0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "480e659a-feec-47ca-b882-cee0df193df0" (UID: "480e659a-feec-47ca-b882-cee0df193df0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 17:08:58 crc kubenswrapper[4865]: I0126 17:08:58.708494 4865 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/480e659a-feec-47ca-b882-cee0df193df0-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 17:08:58 crc kubenswrapper[4865]: I0126 17:08:58.800281 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jb7jc"] Jan 26 17:08:58 crc kubenswrapper[4865]: I0126 17:08:58.805173 4865 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-jb7jc"] Jan 26 17:09:00 crc kubenswrapper[4865]: I0126 17:09:00.364948 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="480e659a-feec-47ca-b882-cee0df193df0" path="/var/lib/kubelet/pods/480e659a-feec-47ca-b882-cee0df193df0/volumes" Jan 26 17:09:08 crc kubenswrapper[4865]: I0126 17:09:08.357397 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-s72p2" Jan 26 17:09:08 crc kubenswrapper[4865]: I0126 17:09:08.359088 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-s72p2" Jan 26 17:09:08 crc kubenswrapper[4865]: I0126 17:09:08.677203 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-s72p2"] Jan 26 17:09:08 crc kubenswrapper[4865]: W0126 17:09:08.688982 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9582195a_21dd_49a2_9f9b_974c3dfcf9e2.slice/crio-902c357995e175a251abf7b3860ed084525784edc73d1d6bd6d13f067858ee1c WatchSource:0}: Error finding container 902c357995e175a251abf7b3860ed084525784edc73d1d6bd6d13f067858ee1c: Status 404 returned error can't find the container with id 902c357995e175a251abf7b3860ed084525784edc73d1d6bd6d13f067858ee1c Jan 26 17:09:09 crc kubenswrapper[4865]: I0126 17:09:09.680090 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-646758c888-s72p2" event={"ID":"9582195a-21dd-49a2-9f9b-974c3dfcf9e2","Type":"ContainerStarted","Data":"902c357995e175a251abf7b3860ed084525784edc73d1d6bd6d13f067858ee1c"} Jan 26 17:09:12 crc kubenswrapper[4865]: I0126 17:09:12.699122 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-646758c888-s72p2" event={"ID":"9582195a-21dd-49a2-9f9b-974c3dfcf9e2","Type":"ContainerStarted","Data":"ba791520ade1acaa0e92e2a4618e28a1828dd643d02d0666cdac3987a7e2b921"} Jan 26 17:09:12 crc kubenswrapper[4865]: I0126 17:09:12.723511 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-646758c888-s72p2" podStartSLOduration=25.843236498 podStartE2EDuration="28.72349195s" podCreationTimestamp="2026-01-26 17:08:44 +0000 UTC" firstStartedPulling="2026-01-26 17:09:08.692642863 +0000 UTC m=+876.276528450" lastFinishedPulling="2026-01-26 17:09:11.572898315 +0000 UTC m=+879.156783902" observedRunningTime="2026-01-26 17:09:12.719213528 +0000 UTC m=+880.303099115" watchObservedRunningTime="2026-01-26 17:09:12.72349195 +0000 UTC m=+880.307377537" Jan 26 17:09:13 crc kubenswrapper[4865]: I0126 17:09:13.584338 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-49s4v" Jan 26 17:09:13 crc kubenswrapper[4865]: I0126 17:09:13.778241 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-mvzwb"] Jan 26 17:09:13 crc kubenswrapper[4865]: E0126 17:09:13.778496 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="480e659a-feec-47ca-b882-cee0df193df0" containerName="extract-content" Jan 26 17:09:13 crc kubenswrapper[4865]: I0126 17:09:13.778510 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="480e659a-feec-47ca-b882-cee0df193df0" containerName="extract-content" Jan 26 17:09:13 crc kubenswrapper[4865]: E0126 17:09:13.778531 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="480e659a-feec-47ca-b882-cee0df193df0" containerName="registry-server" Jan 26 17:09:13 crc kubenswrapper[4865]: I0126 17:09:13.778537 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="480e659a-feec-47ca-b882-cee0df193df0" containerName="registry-server" Jan 26 17:09:13 crc kubenswrapper[4865]: E0126 17:09:13.778548 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="480e659a-feec-47ca-b882-cee0df193df0" containerName="extract-utilities" Jan 26 17:09:13 crc kubenswrapper[4865]: I0126 17:09:13.778554 4865 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="480e659a-feec-47ca-b882-cee0df193df0" containerName="extract-utilities" Jan 26 17:09:13 crc kubenswrapper[4865]: I0126 17:09:13.778652 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="480e659a-feec-47ca-b882-cee0df193df0" containerName="registry-server" Jan 26 17:09:13 crc kubenswrapper[4865]: I0126 17:09:13.779251 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-54757c584b-mvzwb" Jan 26 17:09:13 crc kubenswrapper[4865]: I0126 17:09:13.782064 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-mq8rr" Jan 26 17:09:13 crc kubenswrapper[4865]: I0126 17:09:13.815707 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-s5d4b"] Jan 26 17:09:13 crc kubenswrapper[4865]: I0126 17:09:13.816898 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-s5d4b" Jan 26 17:09:13 crc kubenswrapper[4865]: I0126 17:09:13.818949 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-mvzwb"] Jan 26 17:09:13 crc kubenswrapper[4865]: I0126 17:09:13.821061 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Jan 26 17:09:13 crc kubenswrapper[4865]: I0126 17:09:13.821847 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-bpm6l"] Jan 26 17:09:13 crc kubenswrapper[4865]: I0126 17:09:13.822676 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-bpm6l" Jan 26 17:09:13 crc kubenswrapper[4865]: I0126 17:09:13.840595 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-s5d4b"] Jan 26 17:09:13 crc kubenswrapper[4865]: I0126 17:09:13.843939 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z84zb\" (UniqueName: \"kubernetes.io/projected/00eb8a62-37f1-4a31-a458-98234f852e9b-kube-api-access-z84zb\") pod \"nmstate-handler-bpm6l\" (UID: \"00eb8a62-37f1-4a31-a458-98234f852e9b\") " pod="openshift-nmstate/nmstate-handler-bpm6l" Jan 26 17:09:13 crc kubenswrapper[4865]: I0126 17:09:13.844016 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-72c68\" (UniqueName: \"kubernetes.io/projected/b54078de-88f1-412c-b802-7655f0ed4386-kube-api-access-72c68\") pod \"nmstate-metrics-54757c584b-mvzwb\" (UID: \"b54078de-88f1-412c-b802-7655f0ed4386\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-mvzwb" Jan 26 17:09:13 crc kubenswrapper[4865]: I0126 17:09:13.844043 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/c6acac7c-0c9a-4163-a36e-64eaf898d58a-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-s5d4b\" (UID: \"c6acac7c-0c9a-4163-a36e-64eaf898d58a\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-s5d4b" Jan 26 17:09:13 crc kubenswrapper[4865]: I0126 17:09:13.844122 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k7bf5\" (UniqueName: \"kubernetes.io/projected/c6acac7c-0c9a-4163-a36e-64eaf898d58a-kube-api-access-k7bf5\") pod \"nmstate-webhook-8474b5b9d8-s5d4b\" (UID: 
\"c6acac7c-0c9a-4163-a36e-64eaf898d58a\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-s5d4b" Jan 26 17:09:13 crc kubenswrapper[4865]: I0126 17:09:13.844142 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/00eb8a62-37f1-4a31-a458-98234f852e9b-ovs-socket\") pod \"nmstate-handler-bpm6l\" (UID: \"00eb8a62-37f1-4a31-a458-98234f852e9b\") " pod="openshift-nmstate/nmstate-handler-bpm6l" Jan 26 17:09:13 crc kubenswrapper[4865]: I0126 17:09:13.844178 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/00eb8a62-37f1-4a31-a458-98234f852e9b-dbus-socket\") pod \"nmstate-handler-bpm6l\" (UID: \"00eb8a62-37f1-4a31-a458-98234f852e9b\") " pod="openshift-nmstate/nmstate-handler-bpm6l" Jan 26 17:09:13 crc kubenswrapper[4865]: I0126 17:09:13.844220 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/00eb8a62-37f1-4a31-a458-98234f852e9b-nmstate-lock\") pod \"nmstate-handler-bpm6l\" (UID: \"00eb8a62-37f1-4a31-a458-98234f852e9b\") " pod="openshift-nmstate/nmstate-handler-bpm6l" Jan 26 17:09:13 crc kubenswrapper[4865]: I0126 17:09:13.945869 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-72c68\" (UniqueName: \"kubernetes.io/projected/b54078de-88f1-412c-b802-7655f0ed4386-kube-api-access-72c68\") pod \"nmstate-metrics-54757c584b-mvzwb\" (UID: \"b54078de-88f1-412c-b802-7655f0ed4386\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-mvzwb" Jan 26 17:09:13 crc kubenswrapper[4865]: I0126 17:09:13.945924 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/c6acac7c-0c9a-4163-a36e-64eaf898d58a-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-s5d4b\" (UID: \"c6acac7c-0c9a-4163-a36e-64eaf898d58a\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-s5d4b" Jan 26 17:09:13 crc kubenswrapper[4865]: I0126 17:09:13.945954 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k7bf5\" (UniqueName: \"kubernetes.io/projected/c6acac7c-0c9a-4163-a36e-64eaf898d58a-kube-api-access-k7bf5\") pod \"nmstate-webhook-8474b5b9d8-s5d4b\" (UID: \"c6acac7c-0c9a-4163-a36e-64eaf898d58a\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-s5d4b" Jan 26 17:09:13 crc kubenswrapper[4865]: I0126 17:09:13.945978 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/00eb8a62-37f1-4a31-a458-98234f852e9b-ovs-socket\") pod \"nmstate-handler-bpm6l\" (UID: \"00eb8a62-37f1-4a31-a458-98234f852e9b\") " pod="openshift-nmstate/nmstate-handler-bpm6l" Jan 26 17:09:13 crc kubenswrapper[4865]: I0126 17:09:13.946017 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/00eb8a62-37f1-4a31-a458-98234f852e9b-dbus-socket\") pod \"nmstate-handler-bpm6l\" (UID: \"00eb8a62-37f1-4a31-a458-98234f852e9b\") " pod="openshift-nmstate/nmstate-handler-bpm6l" Jan 26 17:09:13 crc kubenswrapper[4865]: I0126 17:09:13.946055 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/00eb8a62-37f1-4a31-a458-98234f852e9b-nmstate-lock\") pod 
\"nmstate-handler-bpm6l\" (UID: \"00eb8a62-37f1-4a31-a458-98234f852e9b\") " pod="openshift-nmstate/nmstate-handler-bpm6l" Jan 26 17:09:13 crc kubenswrapper[4865]: I0126 17:09:13.946084 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z84zb\" (UniqueName: \"kubernetes.io/projected/00eb8a62-37f1-4a31-a458-98234f852e9b-kube-api-access-z84zb\") pod \"nmstate-handler-bpm6l\" (UID: \"00eb8a62-37f1-4a31-a458-98234f852e9b\") " pod="openshift-nmstate/nmstate-handler-bpm6l" Jan 26 17:09:13 crc kubenswrapper[4865]: E0126 17:09:13.946566 4865 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Jan 26 17:09:13 crc kubenswrapper[4865]: E0126 17:09:13.946644 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c6acac7c-0c9a-4163-a36e-64eaf898d58a-tls-key-pair podName:c6acac7c-0c9a-4163-a36e-64eaf898d58a nodeName:}" failed. No retries permitted until 2026-01-26 17:09:14.446610824 +0000 UTC m=+882.030496411 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/c6acac7c-0c9a-4163-a36e-64eaf898d58a-tls-key-pair") pod "nmstate-webhook-8474b5b9d8-s5d4b" (UID: "c6acac7c-0c9a-4163-a36e-64eaf898d58a") : secret "openshift-nmstate-webhook" not found Jan 26 17:09:13 crc kubenswrapper[4865]: I0126 17:09:13.946876 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/00eb8a62-37f1-4a31-a458-98234f852e9b-ovs-socket\") pod \"nmstate-handler-bpm6l\" (UID: \"00eb8a62-37f1-4a31-a458-98234f852e9b\") " pod="openshift-nmstate/nmstate-handler-bpm6l" Jan 26 17:09:13 crc kubenswrapper[4865]: I0126 17:09:13.949428 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/00eb8a62-37f1-4a31-a458-98234f852e9b-dbus-socket\") pod \"nmstate-handler-bpm6l\" (UID: \"00eb8a62-37f1-4a31-a458-98234f852e9b\") " pod="openshift-nmstate/nmstate-handler-bpm6l" Jan 26 17:09:13 crc kubenswrapper[4865]: I0126 17:09:13.949477 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/00eb8a62-37f1-4a31-a458-98234f852e9b-nmstate-lock\") pod \"nmstate-handler-bpm6l\" (UID: \"00eb8a62-37f1-4a31-a458-98234f852e9b\") " pod="openshift-nmstate/nmstate-handler-bpm6l" Jan 26 17:09:13 crc kubenswrapper[4865]: I0126 17:09:13.974532 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k7bf5\" (UniqueName: \"kubernetes.io/projected/c6acac7c-0c9a-4163-a36e-64eaf898d58a-kube-api-access-k7bf5\") pod \"nmstate-webhook-8474b5b9d8-s5d4b\" (UID: \"c6acac7c-0c9a-4163-a36e-64eaf898d58a\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-s5d4b" Jan 26 17:09:13 crc kubenswrapper[4865]: I0126 17:09:13.975686 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z84zb\" (UniqueName: \"kubernetes.io/projected/00eb8a62-37f1-4a31-a458-98234f852e9b-kube-api-access-z84zb\") pod \"nmstate-handler-bpm6l\" (UID: \"00eb8a62-37f1-4a31-a458-98234f852e9b\") " pod="openshift-nmstate/nmstate-handler-bpm6l" Jan 26 17:09:13 crc kubenswrapper[4865]: I0126 17:09:13.979274 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-72c68\" (UniqueName: \"kubernetes.io/projected/b54078de-88f1-412c-b802-7655f0ed4386-kube-api-access-72c68\") pod 
\"nmstate-metrics-54757c584b-mvzwb\" (UID: \"b54078de-88f1-412c-b802-7655f0ed4386\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-mvzwb" Jan 26 17:09:13 crc kubenswrapper[4865]: I0126 17:09:13.981062 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-wvrt8"] Jan 26 17:09:13 crc kubenswrapper[4865]: I0126 17:09:13.981921 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-wvrt8" Jan 26 17:09:13 crc kubenswrapper[4865]: I0126 17:09:13.984671 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-fxt4m" Jan 26 17:09:13 crc kubenswrapper[4865]: I0126 17:09:13.984846 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Jan 26 17:09:13 crc kubenswrapper[4865]: I0126 17:09:13.985044 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Jan 26 17:09:13 crc kubenswrapper[4865]: I0126 17:09:13.993604 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-wvrt8"] Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.133057 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-54757c584b-mvzwb" Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.148193 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/8881cc48-35fa-4f5d-b5e0-ba19d609616d-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-wvrt8\" (UID: \"8881cc48-35fa-4f5d-b5e0-ba19d609616d\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-wvrt8" Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.148778 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/8881cc48-35fa-4f5d-b5e0-ba19d609616d-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-wvrt8\" (UID: \"8881cc48-35fa-4f5d-b5e0-ba19d609616d\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-wvrt8" Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.148453 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-bpm6l" Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.148840 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xbvv7\" (UniqueName: \"kubernetes.io/projected/8881cc48-35fa-4f5d-b5e0-ba19d609616d-kube-api-access-xbvv7\") pod \"nmstate-console-plugin-7754f76f8b-wvrt8\" (UID: \"8881cc48-35fa-4f5d-b5e0-ba19d609616d\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-wvrt8" Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.206604 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-7b47776466-x5qd5"] Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.208338 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-7b47776466-x5qd5" Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.229470 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-7b47776466-x5qd5"] Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.250212 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/8881cc48-35fa-4f5d-b5e0-ba19d609616d-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-wvrt8\" (UID: \"8881cc48-35fa-4f5d-b5e0-ba19d609616d\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-wvrt8" Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.251744 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/8881cc48-35fa-4f5d-b5e0-ba19d609616d-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-wvrt8\" (UID: \"8881cc48-35fa-4f5d-b5e0-ba19d609616d\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-wvrt8" Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.254046 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xbvv7\" (UniqueName: \"kubernetes.io/projected/8881cc48-35fa-4f5d-b5e0-ba19d609616d-kube-api-access-xbvv7\") pod \"nmstate-console-plugin-7754f76f8b-wvrt8\" (UID: \"8881cc48-35fa-4f5d-b5e0-ba19d609616d\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-wvrt8" Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.254669 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/8881cc48-35fa-4f5d-b5e0-ba19d609616d-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-wvrt8\" (UID: \"8881cc48-35fa-4f5d-b5e0-ba19d609616d\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-wvrt8" Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.294436 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xbvv7\" (UniqueName: \"kubernetes.io/projected/8881cc48-35fa-4f5d-b5e0-ba19d609616d-kube-api-access-xbvv7\") pod \"nmstate-console-plugin-7754f76f8b-wvrt8\" (UID: \"8881cc48-35fa-4f5d-b5e0-ba19d609616d\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-wvrt8" Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.294584 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/8881cc48-35fa-4f5d-b5e0-ba19d609616d-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-wvrt8\" (UID: \"8881cc48-35fa-4f5d-b5e0-ba19d609616d\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-wvrt8" Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.329231 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-wvrt8" Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.358046 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43e668f1-7d39-4a29-bc02-627955f0b407-trusted-ca-bundle\") pod \"console-7b47776466-x5qd5\" (UID: \"43e668f1-7d39-4a29-bc02-627955f0b407\") " pod="openshift-console/console-7b47776466-x5qd5" Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.358502 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43e668f1-7d39-4a29-bc02-627955f0b407-service-ca\") pod \"console-7b47776466-x5qd5\" (UID: \"43e668f1-7d39-4a29-bc02-627955f0b407\") " pod="openshift-console/console-7b47776466-x5qd5" Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.358539 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43e668f1-7d39-4a29-bc02-627955f0b407-console-config\") pod \"console-7b47776466-x5qd5\" (UID: \"43e668f1-7d39-4a29-bc02-627955f0b407\") " pod="openshift-console/console-7b47776466-x5qd5" Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.358577 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-md92n\" (UniqueName: \"kubernetes.io/projected/43e668f1-7d39-4a29-bc02-627955f0b407-kube-api-access-md92n\") pod \"console-7b47776466-x5qd5\" (UID: \"43e668f1-7d39-4a29-bc02-627955f0b407\") " pod="openshift-console/console-7b47776466-x5qd5" Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.358599 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43e668f1-7d39-4a29-bc02-627955f0b407-console-serving-cert\") pod \"console-7b47776466-x5qd5\" (UID: \"43e668f1-7d39-4a29-bc02-627955f0b407\") " pod="openshift-console/console-7b47776466-x5qd5" Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.358646 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43e668f1-7d39-4a29-bc02-627955f0b407-console-oauth-config\") pod \"console-7b47776466-x5qd5\" (UID: \"43e668f1-7d39-4a29-bc02-627955f0b407\") " pod="openshift-console/console-7b47776466-x5qd5" Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.358704 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43e668f1-7d39-4a29-bc02-627955f0b407-oauth-serving-cert\") pod \"console-7b47776466-x5qd5\" (UID: \"43e668f1-7d39-4a29-bc02-627955f0b407\") " pod="openshift-console/console-7b47776466-x5qd5" Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.441903 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-mvzwb"] Jan 26 17:09:14 crc kubenswrapper[4865]: W0126 17:09:14.452028 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb54078de_88f1_412c_b802_7655f0ed4386.slice/crio-7e33fd2443121ca46c96c86c89b85b57263ce2cc5c4dcd04ad15b664f1b0e26d WatchSource:0}: Error finding container 
7e33fd2443121ca46c96c86c89b85b57263ce2cc5c4dcd04ad15b664f1b0e26d: Status 404 returned error can't find the container with id 7e33fd2443121ca46c96c86c89b85b57263ce2cc5c4dcd04ad15b664f1b0e26d Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.460728 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43e668f1-7d39-4a29-bc02-627955f0b407-console-serving-cert\") pod \"console-7b47776466-x5qd5\" (UID: \"43e668f1-7d39-4a29-bc02-627955f0b407\") " pod="openshift-console/console-7b47776466-x5qd5" Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.461374 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-md92n\" (UniqueName: \"kubernetes.io/projected/43e668f1-7d39-4a29-bc02-627955f0b407-kube-api-access-md92n\") pod \"console-7b47776466-x5qd5\" (UID: \"43e668f1-7d39-4a29-bc02-627955f0b407\") " pod="openshift-console/console-7b47776466-x5qd5" Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.461421 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/c6acac7c-0c9a-4163-a36e-64eaf898d58a-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-s5d4b\" (UID: \"c6acac7c-0c9a-4163-a36e-64eaf898d58a\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-s5d4b" Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.461450 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43e668f1-7d39-4a29-bc02-627955f0b407-console-oauth-config\") pod \"console-7b47776466-x5qd5\" (UID: \"43e668f1-7d39-4a29-bc02-627955f0b407\") " pod="openshift-console/console-7b47776466-x5qd5" Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.461511 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43e668f1-7d39-4a29-bc02-627955f0b407-oauth-serving-cert\") pod \"console-7b47776466-x5qd5\" (UID: \"43e668f1-7d39-4a29-bc02-627955f0b407\") " pod="openshift-console/console-7b47776466-x5qd5" Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.461575 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43e668f1-7d39-4a29-bc02-627955f0b407-trusted-ca-bundle\") pod \"console-7b47776466-x5qd5\" (UID: \"43e668f1-7d39-4a29-bc02-627955f0b407\") " pod="openshift-console/console-7b47776466-x5qd5" Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.461600 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43e668f1-7d39-4a29-bc02-627955f0b407-service-ca\") pod \"console-7b47776466-x5qd5\" (UID: \"43e668f1-7d39-4a29-bc02-627955f0b407\") " pod="openshift-console/console-7b47776466-x5qd5" Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.461628 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43e668f1-7d39-4a29-bc02-627955f0b407-console-config\") pod \"console-7b47776466-x5qd5\" (UID: \"43e668f1-7d39-4a29-bc02-627955f0b407\") " pod="openshift-console/console-7b47776466-x5qd5" Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.463202 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: 
\"kubernetes.io/configmap/43e668f1-7d39-4a29-bc02-627955f0b407-console-config\") pod \"console-7b47776466-x5qd5\" (UID: \"43e668f1-7d39-4a29-bc02-627955f0b407\") " pod="openshift-console/console-7b47776466-x5qd5" Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.465528 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43e668f1-7d39-4a29-bc02-627955f0b407-trusted-ca-bundle\") pod \"console-7b47776466-x5qd5\" (UID: \"43e668f1-7d39-4a29-bc02-627955f0b407\") " pod="openshift-console/console-7b47776466-x5qd5" Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.465911 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43e668f1-7d39-4a29-bc02-627955f0b407-oauth-serving-cert\") pod \"console-7b47776466-x5qd5\" (UID: \"43e668f1-7d39-4a29-bc02-627955f0b407\") " pod="openshift-console/console-7b47776466-x5qd5" Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.466411 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43e668f1-7d39-4a29-bc02-627955f0b407-service-ca\") pod \"console-7b47776466-x5qd5\" (UID: \"43e668f1-7d39-4a29-bc02-627955f0b407\") " pod="openshift-console/console-7b47776466-x5qd5" Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.470337 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/c6acac7c-0c9a-4163-a36e-64eaf898d58a-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-s5d4b\" (UID: \"c6acac7c-0c9a-4163-a36e-64eaf898d58a\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-s5d4b" Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.470376 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43e668f1-7d39-4a29-bc02-627955f0b407-console-serving-cert\") pod \"console-7b47776466-x5qd5\" (UID: \"43e668f1-7d39-4a29-bc02-627955f0b407\") " pod="openshift-console/console-7b47776466-x5qd5" Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.476211 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43e668f1-7d39-4a29-bc02-627955f0b407-console-oauth-config\") pod \"console-7b47776466-x5qd5\" (UID: \"43e668f1-7d39-4a29-bc02-627955f0b407\") " pod="openshift-console/console-7b47776466-x5qd5" Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.486226 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-md92n\" (UniqueName: \"kubernetes.io/projected/43e668f1-7d39-4a29-bc02-627955f0b407-kube-api-access-md92n\") pod \"console-7b47776466-x5qd5\" (UID: \"43e668f1-7d39-4a29-bc02-627955f0b407\") " pod="openshift-console/console-7b47776466-x5qd5" Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.569571 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-7b47776466-x5qd5" Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.575888 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-wvrt8"] Jan 26 17:09:14 crc kubenswrapper[4865]: W0126 17:09:14.595429 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8881cc48_35fa_4f5d_b5e0_ba19d609616d.slice/crio-f2247e2c342d62329b56f8e137916e97a5d7c0e533c82098fbc0710a2a896a47 WatchSource:0}: Error finding container f2247e2c342d62329b56f8e137916e97a5d7c0e533c82098fbc0710a2a896a47: Status 404 returned error can't find the container with id f2247e2c342d62329b56f8e137916e97a5d7c0e533c82098fbc0710a2a896a47 Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.715952 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-mvzwb" event={"ID":"b54078de-88f1-412c-b802-7655f0ed4386","Type":"ContainerStarted","Data":"7e33fd2443121ca46c96c86c89b85b57263ce2cc5c4dcd04ad15b664f1b0e26d"} Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.717040 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-bpm6l" event={"ID":"00eb8a62-37f1-4a31-a458-98234f852e9b","Type":"ContainerStarted","Data":"0a7c714d526a39c6a80b10f75159b5a84eba7048a35c3f1385c6e91aa094c8d1"} Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.717836 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-wvrt8" event={"ID":"8881cc48-35fa-4f5d-b5e0-ba19d609616d","Type":"ContainerStarted","Data":"f2247e2c342d62329b56f8e137916e97a5d7c0e533c82098fbc0710a2a896a47"} Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.735405 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-s5d4b" Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.940852 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-s5d4b"] Jan 26 17:09:14 crc kubenswrapper[4865]: W0126 17:09:14.947050 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc6acac7c_0c9a_4163_a36e_64eaf898d58a.slice/crio-823394a0a776f58039241395ffd256ad9e7b4d959db4be5199e580da5df32d0e WatchSource:0}: Error finding container 823394a0a776f58039241395ffd256ad9e7b4d959db4be5199e580da5df32d0e: Status 404 returned error can't find the container with id 823394a0a776f58039241395ffd256ad9e7b4d959db4be5199e580da5df32d0e Jan 26 17:09:14 crc kubenswrapper[4865]: I0126 17:09:14.975432 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-7b47776466-x5qd5"] Jan 26 17:09:14 crc kubenswrapper[4865]: W0126 17:09:14.977297 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod43e668f1_7d39_4a29_bc02_627955f0b407.slice/crio-8b8bd3c5a2cfb367e899ac502bc8d357c98f14ce0ae199dc9eff5a703cad8025 WatchSource:0}: Error finding container 8b8bd3c5a2cfb367e899ac502bc8d357c98f14ce0ae199dc9eff5a703cad8025: Status 404 returned error can't find the container with id 8b8bd3c5a2cfb367e899ac502bc8d357c98f14ce0ae199dc9eff5a703cad8025 Jan 26 17:09:15 crc kubenswrapper[4865]: I0126 17:09:15.732892 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-s5d4b" event={"ID":"c6acac7c-0c9a-4163-a36e-64eaf898d58a","Type":"ContainerStarted","Data":"823394a0a776f58039241395ffd256ad9e7b4d959db4be5199e580da5df32d0e"} Jan 26 17:09:15 crc kubenswrapper[4865]: I0126 17:09:15.735258 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7b47776466-x5qd5" event={"ID":"43e668f1-7d39-4a29-bc02-627955f0b407","Type":"ContainerStarted","Data":"a8d5e60b72a34d2040b85c253dad36fca3da2f6ed39f703dfbf421bce74e13a3"} Jan 26 17:09:15 crc kubenswrapper[4865]: I0126 17:09:15.735297 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-7b47776466-x5qd5" event={"ID":"43e668f1-7d39-4a29-bc02-627955f0b407","Type":"ContainerStarted","Data":"8b8bd3c5a2cfb367e899ac502bc8d357c98f14ce0ae199dc9eff5a703cad8025"} Jan 26 17:09:15 crc kubenswrapper[4865]: I0126 17:09:15.763484 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-7b47776466-x5qd5" podStartSLOduration=1.76343258 podStartE2EDuration="1.76343258s" podCreationTimestamp="2026-01-26 17:09:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 17:09:15.753913189 +0000 UTC m=+883.337798766" watchObservedRunningTime="2026-01-26 17:09:15.76343258 +0000 UTC m=+883.347318167" Jan 26 17:09:18 crc kubenswrapper[4865]: I0126 17:09:18.755915 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-mvzwb" event={"ID":"b54078de-88f1-412c-b802-7655f0ed4386","Type":"ContainerStarted","Data":"f6c526eb11aae294f39b04cc1db33ae6045a4cc693d941ac0f41ef99f12075d2"} Jan 26 17:09:18 crc kubenswrapper[4865]: I0126 17:09:18.757746 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-bpm6l" 
event={"ID":"00eb8a62-37f1-4a31-a458-98234f852e9b","Type":"ContainerStarted","Data":"282629b77071439aa31d5721c4e1006804968f7265d918904b6b9bba5cfcce10"} Jan 26 17:09:18 crc kubenswrapper[4865]: I0126 17:09:18.757833 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-bpm6l" Jan 26 17:09:18 crc kubenswrapper[4865]: I0126 17:09:18.759408 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-s5d4b" event={"ID":"c6acac7c-0c9a-4163-a36e-64eaf898d58a","Type":"ContainerStarted","Data":"85deee1c1ca2432a356e02c680868dbf7484f55fa0ff541a9b0da7f923b0abc2"} Jan 26 17:09:18 crc kubenswrapper[4865]: I0126 17:09:18.759567 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-s5d4b" Jan 26 17:09:18 crc kubenswrapper[4865]: I0126 17:09:18.760874 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-wvrt8" event={"ID":"8881cc48-35fa-4f5d-b5e0-ba19d609616d","Type":"ContainerStarted","Data":"3c6d94d9b4659f93b8a7be4ae408ae7653da05a2315b234d3e9f8cccebe7e544"} Jan 26 17:09:18 crc kubenswrapper[4865]: I0126 17:09:18.778586 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-bpm6l" podStartSLOduration=2.080169805 podStartE2EDuration="5.778562605s" podCreationTimestamp="2026-01-26 17:09:13 +0000 UTC" firstStartedPulling="2026-01-26 17:09:14.193619773 +0000 UTC m=+881.777505360" lastFinishedPulling="2026-01-26 17:09:17.892012573 +0000 UTC m=+885.475898160" observedRunningTime="2026-01-26 17:09:18.774721235 +0000 UTC m=+886.358606832" watchObservedRunningTime="2026-01-26 17:09:18.778562605 +0000 UTC m=+886.362448192" Jan 26 17:09:18 crc kubenswrapper[4865]: I0126 17:09:18.793275 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-s5d4b" podStartSLOduration=2.827799088 podStartE2EDuration="5.793251572s" podCreationTimestamp="2026-01-26 17:09:13 +0000 UTC" firstStartedPulling="2026-01-26 17:09:14.94945892 +0000 UTC m=+882.533344507" lastFinishedPulling="2026-01-26 17:09:17.914911404 +0000 UTC m=+885.498796991" observedRunningTime="2026-01-26 17:09:18.788766064 +0000 UTC m=+886.372651651" watchObservedRunningTime="2026-01-26 17:09:18.793251572 +0000 UTC m=+886.377137159" Jan 26 17:09:18 crc kubenswrapper[4865]: I0126 17:09:18.810611 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-wvrt8" podStartSLOduration=2.511034708 podStartE2EDuration="5.810587775s" podCreationTimestamp="2026-01-26 17:09:13 +0000 UTC" firstStartedPulling="2026-01-26 17:09:14.598657652 +0000 UTC m=+882.182543239" lastFinishedPulling="2026-01-26 17:09:17.898210719 +0000 UTC m=+885.482096306" observedRunningTime="2026-01-26 17:09:18.806875189 +0000 UTC m=+886.390760776" watchObservedRunningTime="2026-01-26 17:09:18.810587775 +0000 UTC m=+886.394473382" Jan 26 17:09:20 crc kubenswrapper[4865]: I0126 17:09:20.777689 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-mvzwb" event={"ID":"b54078de-88f1-412c-b802-7655f0ed4386","Type":"ContainerStarted","Data":"a250c971e5021f44fdab471fa4b70b4d7ba184ed1aa3ed5b21a056c897df7bab"} Jan 26 17:09:20 crc kubenswrapper[4865]: I0126 17:09:20.830240 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-nmstate/nmstate-metrics-54757c584b-mvzwb" podStartSLOduration=1.9767799959999999 podStartE2EDuration="7.830215392s" podCreationTimestamp="2026-01-26 17:09:13 +0000 UTC" firstStartedPulling="2026-01-26 17:09:14.455780422 +0000 UTC m=+882.039666009" lastFinishedPulling="2026-01-26 17:09:20.309215808 +0000 UTC m=+887.893101405" observedRunningTime="2026-01-26 17:09:20.827870045 +0000 UTC m=+888.411755632" watchObservedRunningTime="2026-01-26 17:09:20.830215392 +0000 UTC m=+888.414100979" Jan 26 17:09:24 crc kubenswrapper[4865]: I0126 17:09:24.176431 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-bpm6l" Jan 26 17:09:24 crc kubenswrapper[4865]: I0126 17:09:24.570446 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-7b47776466-x5qd5" Jan 26 17:09:24 crc kubenswrapper[4865]: I0126 17:09:24.570492 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-7b47776466-x5qd5" Jan 26 17:09:24 crc kubenswrapper[4865]: I0126 17:09:24.575159 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-7b47776466-x5qd5" Jan 26 17:09:24 crc kubenswrapper[4865]: I0126 17:09:24.806460 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-7b47776466-x5qd5" Jan 26 17:09:24 crc kubenswrapper[4865]: I0126 17:09:24.872821 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-dsngx"] Jan 26 17:09:34 crc kubenswrapper[4865]: I0126 17:09:34.743858 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-s5d4b" Jan 26 17:09:34 crc kubenswrapper[4865]: I0126 17:09:34.780831 4865 scope.go:117] "RemoveContainer" containerID="d80fdc9f189133778add9127ee70dbdd4ca23ed09cf047218ed4ce9e4b07ce36" Jan 26 17:09:34 crc kubenswrapper[4865]: I0126 17:09:34.871304 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-bz29j_d5c89572-d108-4b35-ab46-dfbbc8b7e3be/kube-multus/2.log" Jan 26 17:09:49 crc kubenswrapper[4865]: I0126 17:09:49.334403 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc5b6zs"] Jan 26 17:09:49 crc kubenswrapper[4865]: I0126 17:09:49.336346 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc5b6zs" Jan 26 17:09:49 crc kubenswrapper[4865]: I0126 17:09:49.338641 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 26 17:09:49 crc kubenswrapper[4865]: I0126 17:09:49.347350 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc5b6zs"] Jan 26 17:09:49 crc kubenswrapper[4865]: I0126 17:09:49.424183 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hfdrk\" (UniqueName: \"kubernetes.io/projected/220af678-5662-4900-a059-63e491bcc0a3-kube-api-access-hfdrk\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc5b6zs\" (UID: \"220af678-5662-4900-a059-63e491bcc0a3\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc5b6zs" Jan 26 17:09:49 crc kubenswrapper[4865]: I0126 17:09:49.424686 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/220af678-5662-4900-a059-63e491bcc0a3-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc5b6zs\" (UID: \"220af678-5662-4900-a059-63e491bcc0a3\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc5b6zs" Jan 26 17:09:49 crc kubenswrapper[4865]: I0126 17:09:49.424757 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/220af678-5662-4900-a059-63e491bcc0a3-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc5b6zs\" (UID: \"220af678-5662-4900-a059-63e491bcc0a3\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc5b6zs" Jan 26 17:09:49 crc kubenswrapper[4865]: I0126 17:09:49.525400 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/220af678-5662-4900-a059-63e491bcc0a3-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc5b6zs\" (UID: \"220af678-5662-4900-a059-63e491bcc0a3\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc5b6zs" Jan 26 17:09:49 crc kubenswrapper[4865]: I0126 17:09:49.525676 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/220af678-5662-4900-a059-63e491bcc0a3-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc5b6zs\" (UID: \"220af678-5662-4900-a059-63e491bcc0a3\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc5b6zs" Jan 26 17:09:49 crc kubenswrapper[4865]: I0126 17:09:49.525811 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hfdrk\" (UniqueName: \"kubernetes.io/projected/220af678-5662-4900-a059-63e491bcc0a3-kube-api-access-hfdrk\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc5b6zs\" (UID: \"220af678-5662-4900-a059-63e491bcc0a3\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc5b6zs" Jan 26 17:09:49 crc kubenswrapper[4865]: I0126 17:09:49.526664 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/220af678-5662-4900-a059-63e491bcc0a3-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc5b6zs\" (UID: \"220af678-5662-4900-a059-63e491bcc0a3\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc5b6zs" Jan 26 17:09:49 crc kubenswrapper[4865]: I0126 17:09:49.534507 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/220af678-5662-4900-a059-63e491bcc0a3-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc5b6zs\" (UID: \"220af678-5662-4900-a059-63e491bcc0a3\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc5b6zs" Jan 26 17:09:49 crc kubenswrapper[4865]: I0126 17:09:49.559791 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hfdrk\" (UniqueName: \"kubernetes.io/projected/220af678-5662-4900-a059-63e491bcc0a3-kube-api-access-hfdrk\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc5b6zs\" (UID: \"220af678-5662-4900-a059-63e491bcc0a3\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc5b6zs" Jan 26 17:09:49 crc kubenswrapper[4865]: I0126 17:09:49.661114 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc5b6zs" Jan 26 17:09:49 crc kubenswrapper[4865]: I0126 17:09:49.919581 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-dsngx" podUID="f8fac562-f66c-433b-9e4a-1a08fe6d78f5" containerName="console" containerID="cri-o://f0cf2e24a587d14b3f9cb39afc91fd2ef9c46491d704dbf457c3e727b84876dd" gracePeriod=15 Jan 26 17:09:50 crc kubenswrapper[4865]: I0126 17:09:50.109603 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc5b6zs"] Jan 26 17:09:50 crc kubenswrapper[4865]: I0126 17:09:50.253350 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-dsngx_f8fac562-f66c-433b-9e4a-1a08fe6d78f5/console/0.log" Jan 26 17:09:50 crc kubenswrapper[4865]: I0126 17:09:50.253416 4865 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-dsngx" Jan 26 17:09:50 crc kubenswrapper[4865]: I0126 17:09:50.437450 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/f8fac562-f66c-433b-9e4a-1a08fe6d78f5-oauth-serving-cert\") pod \"f8fac562-f66c-433b-9e4a-1a08fe6d78f5\" (UID: \"f8fac562-f66c-433b-9e4a-1a08fe6d78f5\") " Jan 26 17:09:50 crc kubenswrapper[4865]: I0126 17:09:50.437579 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-96p5c\" (UniqueName: \"kubernetes.io/projected/f8fac562-f66c-433b-9e4a-1a08fe6d78f5-kube-api-access-96p5c\") pod \"f8fac562-f66c-433b-9e4a-1a08fe6d78f5\" (UID: \"f8fac562-f66c-433b-9e4a-1a08fe6d78f5\") " Jan 26 17:09:50 crc kubenswrapper[4865]: I0126 17:09:50.437604 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/f8fac562-f66c-433b-9e4a-1a08fe6d78f5-console-oauth-config\") pod \"f8fac562-f66c-433b-9e4a-1a08fe6d78f5\" (UID: \"f8fac562-f66c-433b-9e4a-1a08fe6d78f5\") " Jan 26 17:09:50 crc kubenswrapper[4865]: I0126 17:09:50.437631 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f8fac562-f66c-433b-9e4a-1a08fe6d78f5-trusted-ca-bundle\") pod \"f8fac562-f66c-433b-9e4a-1a08fe6d78f5\" (UID: \"f8fac562-f66c-433b-9e4a-1a08fe6d78f5\") " Jan 26 17:09:50 crc kubenswrapper[4865]: I0126 17:09:50.437671 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f8fac562-f66c-433b-9e4a-1a08fe6d78f5-service-ca\") pod \"f8fac562-f66c-433b-9e4a-1a08fe6d78f5\" (UID: \"f8fac562-f66c-433b-9e4a-1a08fe6d78f5\") " Jan 26 17:09:50 crc kubenswrapper[4865]: I0126 17:09:50.437693 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/f8fac562-f66c-433b-9e4a-1a08fe6d78f5-console-config\") pod \"f8fac562-f66c-433b-9e4a-1a08fe6d78f5\" (UID: \"f8fac562-f66c-433b-9e4a-1a08fe6d78f5\") " Jan 26 17:09:50 crc kubenswrapper[4865]: I0126 17:09:50.437726 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/f8fac562-f66c-433b-9e4a-1a08fe6d78f5-console-serving-cert\") pod \"f8fac562-f66c-433b-9e4a-1a08fe6d78f5\" (UID: \"f8fac562-f66c-433b-9e4a-1a08fe6d78f5\") " Jan 26 17:09:50 crc kubenswrapper[4865]: I0126 17:09:50.439262 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f8fac562-f66c-433b-9e4a-1a08fe6d78f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "f8fac562-f66c-433b-9e4a-1a08fe6d78f5" (UID: "f8fac562-f66c-433b-9e4a-1a08fe6d78f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 17:09:50 crc kubenswrapper[4865]: I0126 17:09:50.439457 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f8fac562-f66c-433b-9e4a-1a08fe6d78f5-console-config" (OuterVolumeSpecName: "console-config") pod "f8fac562-f66c-433b-9e4a-1a08fe6d78f5" (UID: "f8fac562-f66c-433b-9e4a-1a08fe6d78f5"). InnerVolumeSpecName "console-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 17:09:50 crc kubenswrapper[4865]: I0126 17:09:50.439656 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f8fac562-f66c-433b-9e4a-1a08fe6d78f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "f8fac562-f66c-433b-9e4a-1a08fe6d78f5" (UID: "f8fac562-f66c-433b-9e4a-1a08fe6d78f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 17:09:50 crc kubenswrapper[4865]: I0126 17:09:50.439699 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f8fac562-f66c-433b-9e4a-1a08fe6d78f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "f8fac562-f66c-433b-9e4a-1a08fe6d78f5" (UID: "f8fac562-f66c-433b-9e4a-1a08fe6d78f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 17:09:50 crc kubenswrapper[4865]: I0126 17:09:50.444766 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8fac562-f66c-433b-9e4a-1a08fe6d78f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "f8fac562-f66c-433b-9e4a-1a08fe6d78f5" (UID: "f8fac562-f66c-433b-9e4a-1a08fe6d78f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 17:09:50 crc kubenswrapper[4865]: I0126 17:09:50.445057 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f8fac562-f66c-433b-9e4a-1a08fe6d78f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "f8fac562-f66c-433b-9e4a-1a08fe6d78f5" (UID: "f8fac562-f66c-433b-9e4a-1a08fe6d78f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 17:09:50 crc kubenswrapper[4865]: I0126 17:09:50.445089 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f8fac562-f66c-433b-9e4a-1a08fe6d78f5-kube-api-access-96p5c" (OuterVolumeSpecName: "kube-api-access-96p5c") pod "f8fac562-f66c-433b-9e4a-1a08fe6d78f5" (UID: "f8fac562-f66c-433b-9e4a-1a08fe6d78f5"). InnerVolumeSpecName "kube-api-access-96p5c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 17:09:50 crc kubenswrapper[4865]: I0126 17:09:50.539273 4865 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/f8fac562-f66c-433b-9e4a-1a08fe6d78f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 17:09:50 crc kubenswrapper[4865]: I0126 17:09:50.539317 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-96p5c\" (UniqueName: \"kubernetes.io/projected/f8fac562-f66c-433b-9e4a-1a08fe6d78f5-kube-api-access-96p5c\") on node \"crc\" DevicePath \"\"" Jan 26 17:09:50 crc kubenswrapper[4865]: I0126 17:09:50.539339 4865 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/f8fac562-f66c-433b-9e4a-1a08fe6d78f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Jan 26 17:09:50 crc kubenswrapper[4865]: I0126 17:09:50.539350 4865 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f8fac562-f66c-433b-9e4a-1a08fe6d78f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 26 17:09:50 crc kubenswrapper[4865]: I0126 17:09:50.539361 4865 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f8fac562-f66c-433b-9e4a-1a08fe6d78f5-service-ca\") on node \"crc\" DevicePath \"\"" Jan 26 17:09:50 crc kubenswrapper[4865]: I0126 17:09:50.539371 4865 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/f8fac562-f66c-433b-9e4a-1a08fe6d78f5-console-config\") on node \"crc\" DevicePath \"\"" Jan 26 17:09:50 crc kubenswrapper[4865]: I0126 17:09:50.539383 4865 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/f8fac562-f66c-433b-9e4a-1a08fe6d78f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 17:09:50 crc kubenswrapper[4865]: I0126 17:09:50.987216 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-dsngx_f8fac562-f66c-433b-9e4a-1a08fe6d78f5/console/0.log" Jan 26 17:09:50 crc kubenswrapper[4865]: I0126 17:09:50.987275 4865 generic.go:334] "Generic (PLEG): container finished" podID="f8fac562-f66c-433b-9e4a-1a08fe6d78f5" containerID="f0cf2e24a587d14b3f9cb39afc91fd2ef9c46491d704dbf457c3e727b84876dd" exitCode=2 Jan 26 17:09:50 crc kubenswrapper[4865]: I0126 17:09:50.987341 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-dsngx" event={"ID":"f8fac562-f66c-433b-9e4a-1a08fe6d78f5","Type":"ContainerDied","Data":"f0cf2e24a587d14b3f9cb39afc91fd2ef9c46491d704dbf457c3e727b84876dd"} Jan 26 17:09:50 crc kubenswrapper[4865]: I0126 17:09:50.987373 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-dsngx" event={"ID":"f8fac562-f66c-433b-9e4a-1a08fe6d78f5","Type":"ContainerDied","Data":"ad9757fd3ce2e278b3e54f0a0d553f7ef6ba039725cb58726093d764ac9d814d"} Jan 26 17:09:50 crc kubenswrapper[4865]: I0126 17:09:50.987368 4865 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-dsngx" Jan 26 17:09:50 crc kubenswrapper[4865]: I0126 17:09:50.987408 4865 scope.go:117] "RemoveContainer" containerID="f0cf2e24a587d14b3f9cb39afc91fd2ef9c46491d704dbf457c3e727b84876dd" Jan 26 17:09:50 crc kubenswrapper[4865]: I0126 17:09:50.990260 4865 generic.go:334] "Generic (PLEG): container finished" podID="220af678-5662-4900-a059-63e491bcc0a3" containerID="7bec6727d1b0aa726225a050d5a47a057d1c85b023098152e67b034d4d2e262e" exitCode=0 Jan 26 17:09:50 crc kubenswrapper[4865]: I0126 17:09:50.990339 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc5b6zs" event={"ID":"220af678-5662-4900-a059-63e491bcc0a3","Type":"ContainerDied","Data":"7bec6727d1b0aa726225a050d5a47a057d1c85b023098152e67b034d4d2e262e"} Jan 26 17:09:50 crc kubenswrapper[4865]: I0126 17:09:50.991302 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc5b6zs" event={"ID":"220af678-5662-4900-a059-63e491bcc0a3","Type":"ContainerStarted","Data":"31294e39ce3c5f01c280c390f527443171475be347e440583ed237ea51727f46"} Jan 26 17:09:51 crc kubenswrapper[4865]: I0126 17:09:51.014352 4865 scope.go:117] "RemoveContainer" containerID="f0cf2e24a587d14b3f9cb39afc91fd2ef9c46491d704dbf457c3e727b84876dd" Jan 26 17:09:51 crc kubenswrapper[4865]: E0126 17:09:51.015249 4865 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f0cf2e24a587d14b3f9cb39afc91fd2ef9c46491d704dbf457c3e727b84876dd\": container with ID starting with f0cf2e24a587d14b3f9cb39afc91fd2ef9c46491d704dbf457c3e727b84876dd not found: ID does not exist" containerID="f0cf2e24a587d14b3f9cb39afc91fd2ef9c46491d704dbf457c3e727b84876dd" Jan 26 17:09:51 crc kubenswrapper[4865]: I0126 17:09:51.015298 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f0cf2e24a587d14b3f9cb39afc91fd2ef9c46491d704dbf457c3e727b84876dd"} err="failed to get container status \"f0cf2e24a587d14b3f9cb39afc91fd2ef9c46491d704dbf457c3e727b84876dd\": rpc error: code = NotFound desc = could not find container \"f0cf2e24a587d14b3f9cb39afc91fd2ef9c46491d704dbf457c3e727b84876dd\": container with ID starting with f0cf2e24a587d14b3f9cb39afc91fd2ef9c46491d704dbf457c3e727b84876dd not found: ID does not exist" Jan 26 17:09:51 crc kubenswrapper[4865]: I0126 17:09:51.027229 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-dsngx"] Jan 26 17:09:51 crc kubenswrapper[4865]: I0126 17:09:51.031328 4865 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-dsngx"] Jan 26 17:09:52 crc kubenswrapper[4865]: I0126 17:09:52.367646 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f8fac562-f66c-433b-9e4a-1a08fe6d78f5" path="/var/lib/kubelet/pods/f8fac562-f66c-433b-9e4a-1a08fe6d78f5/volumes" Jan 26 17:09:53 crc kubenswrapper[4865]: I0126 17:09:53.010361 4865 generic.go:334] "Generic (PLEG): container finished" podID="220af678-5662-4900-a059-63e491bcc0a3" containerID="eb4628d47f41bf9244aa6a561fb25f9bda1e36b72f30623ae57c7790069900f6" exitCode=0 Jan 26 17:09:53 crc kubenswrapper[4865]: I0126 17:09:53.010455 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc5b6zs" 
event={"ID":"220af678-5662-4900-a059-63e491bcc0a3","Type":"ContainerDied","Data":"eb4628d47f41bf9244aa6a561fb25f9bda1e36b72f30623ae57c7790069900f6"} Jan 26 17:09:54 crc kubenswrapper[4865]: I0126 17:09:54.019611 4865 generic.go:334] "Generic (PLEG): container finished" podID="220af678-5662-4900-a059-63e491bcc0a3" containerID="b19718c1113235dfb06b24d1a03b56068d52cc8b1847d9352816544fdcf4267b" exitCode=0 Jan 26 17:09:54 crc kubenswrapper[4865]: I0126 17:09:54.019673 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc5b6zs" event={"ID":"220af678-5662-4900-a059-63e491bcc0a3","Type":"ContainerDied","Data":"b19718c1113235dfb06b24d1a03b56068d52cc8b1847d9352816544fdcf4267b"} Jan 26 17:09:55 crc kubenswrapper[4865]: I0126 17:09:55.255470 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc5b6zs" Jan 26 17:09:55 crc kubenswrapper[4865]: I0126 17:09:55.413269 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hfdrk\" (UniqueName: \"kubernetes.io/projected/220af678-5662-4900-a059-63e491bcc0a3-kube-api-access-hfdrk\") pod \"220af678-5662-4900-a059-63e491bcc0a3\" (UID: \"220af678-5662-4900-a059-63e491bcc0a3\") " Jan 26 17:09:55 crc kubenswrapper[4865]: I0126 17:09:55.413367 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/220af678-5662-4900-a059-63e491bcc0a3-bundle\") pod \"220af678-5662-4900-a059-63e491bcc0a3\" (UID: \"220af678-5662-4900-a059-63e491bcc0a3\") " Jan 26 17:09:55 crc kubenswrapper[4865]: I0126 17:09:55.413464 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/220af678-5662-4900-a059-63e491bcc0a3-util\") pod \"220af678-5662-4900-a059-63e491bcc0a3\" (UID: \"220af678-5662-4900-a059-63e491bcc0a3\") " Jan 26 17:09:55 crc kubenswrapper[4865]: I0126 17:09:55.414461 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/220af678-5662-4900-a059-63e491bcc0a3-bundle" (OuterVolumeSpecName: "bundle") pod "220af678-5662-4900-a059-63e491bcc0a3" (UID: "220af678-5662-4900-a059-63e491bcc0a3"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 17:09:55 crc kubenswrapper[4865]: I0126 17:09:55.420912 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/220af678-5662-4900-a059-63e491bcc0a3-kube-api-access-hfdrk" (OuterVolumeSpecName: "kube-api-access-hfdrk") pod "220af678-5662-4900-a059-63e491bcc0a3" (UID: "220af678-5662-4900-a059-63e491bcc0a3"). InnerVolumeSpecName "kube-api-access-hfdrk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 17:09:55 crc kubenswrapper[4865]: I0126 17:09:55.430212 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/220af678-5662-4900-a059-63e491bcc0a3-util" (OuterVolumeSpecName: "util") pod "220af678-5662-4900-a059-63e491bcc0a3" (UID: "220af678-5662-4900-a059-63e491bcc0a3"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 17:09:55 crc kubenswrapper[4865]: I0126 17:09:55.515274 4865 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/220af678-5662-4900-a059-63e491bcc0a3-util\") on node \"crc\" DevicePath \"\"" Jan 26 17:09:55 crc kubenswrapper[4865]: I0126 17:09:55.515321 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hfdrk\" (UniqueName: \"kubernetes.io/projected/220af678-5662-4900-a059-63e491bcc0a3-kube-api-access-hfdrk\") on node \"crc\" DevicePath \"\"" Jan 26 17:09:55 crc kubenswrapper[4865]: I0126 17:09:55.515333 4865 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/220af678-5662-4900-a059-63e491bcc0a3-bundle\") on node \"crc\" DevicePath \"\"" Jan 26 17:09:56 crc kubenswrapper[4865]: I0126 17:09:56.046645 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc5b6zs" Jan 26 17:09:56 crc kubenswrapper[4865]: I0126 17:09:56.046629 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc5b6zs" event={"ID":"220af678-5662-4900-a059-63e491bcc0a3","Type":"ContainerDied","Data":"31294e39ce3c5f01c280c390f527443171475be347e440583ed237ea51727f46"} Jan 26 17:09:56 crc kubenswrapper[4865]: I0126 17:09:56.046803 4865 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="31294e39ce3c5f01c280c390f527443171475be347e440583ed237ea51727f46" Jan 26 17:10:03 crc kubenswrapper[4865]: I0126 17:10:03.090170 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-mjrzc"] Jan 26 17:10:03 crc kubenswrapper[4865]: E0126 17:10:03.092524 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="220af678-5662-4900-a059-63e491bcc0a3" containerName="util" Jan 26 17:10:03 crc kubenswrapper[4865]: I0126 17:10:03.092651 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="220af678-5662-4900-a059-63e491bcc0a3" containerName="util" Jan 26 17:10:03 crc kubenswrapper[4865]: E0126 17:10:03.092740 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="220af678-5662-4900-a059-63e491bcc0a3" containerName="extract" Jan 26 17:10:03 crc kubenswrapper[4865]: I0126 17:10:03.092818 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="220af678-5662-4900-a059-63e491bcc0a3" containerName="extract" Jan 26 17:10:03 crc kubenswrapper[4865]: E0126 17:10:03.092917 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="220af678-5662-4900-a059-63e491bcc0a3" containerName="pull" Jan 26 17:10:03 crc kubenswrapper[4865]: I0126 17:10:03.093024 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="220af678-5662-4900-a059-63e491bcc0a3" containerName="pull" Jan 26 17:10:03 crc kubenswrapper[4865]: E0126 17:10:03.093120 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f8fac562-f66c-433b-9e4a-1a08fe6d78f5" containerName="console" Jan 26 17:10:03 crc kubenswrapper[4865]: I0126 17:10:03.093201 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="f8fac562-f66c-433b-9e4a-1a08fe6d78f5" containerName="console" Jan 26 17:10:03 crc kubenswrapper[4865]: I0126 17:10:03.093434 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="220af678-5662-4900-a059-63e491bcc0a3" containerName="extract" Jan 26 17:10:03 crc 
kubenswrapper[4865]: I0126 17:10:03.093540 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="f8fac562-f66c-433b-9e4a-1a08fe6d78f5" containerName="console" Jan 26 17:10:03 crc kubenswrapper[4865]: I0126 17:10:03.094551 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mjrzc" Jan 26 17:10:03 crc kubenswrapper[4865]: I0126 17:10:03.100819 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mjrzc"] Jan 26 17:10:03 crc kubenswrapper[4865]: I0126 17:10:03.227921 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/329d43a5-6dba-4b29-833d-affb7c051a82-utilities\") pod \"certified-operators-mjrzc\" (UID: \"329d43a5-6dba-4b29-833d-affb7c051a82\") " pod="openshift-marketplace/certified-operators-mjrzc" Jan 26 17:10:03 crc kubenswrapper[4865]: I0126 17:10:03.228648 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/329d43a5-6dba-4b29-833d-affb7c051a82-catalog-content\") pod \"certified-operators-mjrzc\" (UID: \"329d43a5-6dba-4b29-833d-affb7c051a82\") " pod="openshift-marketplace/certified-operators-mjrzc" Jan 26 17:10:03 crc kubenswrapper[4865]: I0126 17:10:03.228792 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9fdqx\" (UniqueName: \"kubernetes.io/projected/329d43a5-6dba-4b29-833d-affb7c051a82-kube-api-access-9fdqx\") pod \"certified-operators-mjrzc\" (UID: \"329d43a5-6dba-4b29-833d-affb7c051a82\") " pod="openshift-marketplace/certified-operators-mjrzc" Jan 26 17:10:03 crc kubenswrapper[4865]: I0126 17:10:03.329818 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/329d43a5-6dba-4b29-833d-affb7c051a82-catalog-content\") pod \"certified-operators-mjrzc\" (UID: \"329d43a5-6dba-4b29-833d-affb7c051a82\") " pod="openshift-marketplace/certified-operators-mjrzc" Jan 26 17:10:03 crc kubenswrapper[4865]: I0126 17:10:03.330203 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9fdqx\" (UniqueName: \"kubernetes.io/projected/329d43a5-6dba-4b29-833d-affb7c051a82-kube-api-access-9fdqx\") pod \"certified-operators-mjrzc\" (UID: \"329d43a5-6dba-4b29-833d-affb7c051a82\") " pod="openshift-marketplace/certified-operators-mjrzc" Jan 26 17:10:03 crc kubenswrapper[4865]: I0126 17:10:03.330365 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/329d43a5-6dba-4b29-833d-affb7c051a82-catalog-content\") pod \"certified-operators-mjrzc\" (UID: \"329d43a5-6dba-4b29-833d-affb7c051a82\") " pod="openshift-marketplace/certified-operators-mjrzc" Jan 26 17:10:03 crc kubenswrapper[4865]: I0126 17:10:03.330375 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/329d43a5-6dba-4b29-833d-affb7c051a82-utilities\") pod \"certified-operators-mjrzc\" (UID: \"329d43a5-6dba-4b29-833d-affb7c051a82\") " pod="openshift-marketplace/certified-operators-mjrzc" Jan 26 17:10:03 crc kubenswrapper[4865]: I0126 17:10:03.330974 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/329d43a5-6dba-4b29-833d-affb7c051a82-utilities\") pod \"certified-operators-mjrzc\" (UID: \"329d43a5-6dba-4b29-833d-affb7c051a82\") " pod="openshift-marketplace/certified-operators-mjrzc" Jan 26 17:10:03 crc kubenswrapper[4865]: I0126 17:10:03.357784 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9fdqx\" (UniqueName: \"kubernetes.io/projected/329d43a5-6dba-4b29-833d-affb7c051a82-kube-api-access-9fdqx\") pod \"certified-operators-mjrzc\" (UID: \"329d43a5-6dba-4b29-833d-affb7c051a82\") " pod="openshift-marketplace/certified-operators-mjrzc" Jan 26 17:10:03 crc kubenswrapper[4865]: I0126 17:10:03.416687 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mjrzc" Jan 26 17:10:04 crc kubenswrapper[4865]: I0126 17:10:04.326470 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-bcd7fd588-487fc"] Jan 26 17:10:04 crc kubenswrapper[4865]: I0126 17:10:04.328142 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-bcd7fd588-487fc" Jan 26 17:10:04 crc kubenswrapper[4865]: I0126 17:10:04.337595 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Jan 26 17:10:04 crc kubenswrapper[4865]: I0126 17:10:04.337736 4865 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-6g8kx" Jan 26 17:10:04 crc kubenswrapper[4865]: I0126 17:10:04.337631 4865 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Jan 26 17:10:04 crc kubenswrapper[4865]: I0126 17:10:04.341860 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Jan 26 17:10:04 crc kubenswrapper[4865]: I0126 17:10:04.342115 4865 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Jan 26 17:10:04 crc kubenswrapper[4865]: I0126 17:10:04.464801 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/97bc7dfb-5005-4c09-9933-2082c2451076-apiservice-cert\") pod \"metallb-operator-controller-manager-bcd7fd588-487fc\" (UID: \"97bc7dfb-5005-4c09-9933-2082c2451076\") " pod="metallb-system/metallb-operator-controller-manager-bcd7fd588-487fc" Jan 26 17:10:04 crc kubenswrapper[4865]: I0126 17:10:04.464904 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/97bc7dfb-5005-4c09-9933-2082c2451076-webhook-cert\") pod \"metallb-operator-controller-manager-bcd7fd588-487fc\" (UID: \"97bc7dfb-5005-4c09-9933-2082c2451076\") " pod="metallb-system/metallb-operator-controller-manager-bcd7fd588-487fc" Jan 26 17:10:04 crc kubenswrapper[4865]: I0126 17:10:04.464939 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v6f8j\" (UniqueName: \"kubernetes.io/projected/97bc7dfb-5005-4c09-9933-2082c2451076-kube-api-access-v6f8j\") pod \"metallb-operator-controller-manager-bcd7fd588-487fc\" (UID: \"97bc7dfb-5005-4c09-9933-2082c2451076\") " pod="metallb-system/metallb-operator-controller-manager-bcd7fd588-487fc" Jan 26 17:10:04 crc kubenswrapper[4865]: I0126 
17:10:04.521832 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-bcd7fd588-487fc"] Jan 26 17:10:04 crc kubenswrapper[4865]: I0126 17:10:04.561505 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mjrzc"] Jan 26 17:10:04 crc kubenswrapper[4865]: I0126 17:10:04.568727 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/97bc7dfb-5005-4c09-9933-2082c2451076-apiservice-cert\") pod \"metallb-operator-controller-manager-bcd7fd588-487fc\" (UID: \"97bc7dfb-5005-4c09-9933-2082c2451076\") " pod="metallb-system/metallb-operator-controller-manager-bcd7fd588-487fc" Jan 26 17:10:04 crc kubenswrapper[4865]: I0126 17:10:04.568816 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/97bc7dfb-5005-4c09-9933-2082c2451076-webhook-cert\") pod \"metallb-operator-controller-manager-bcd7fd588-487fc\" (UID: \"97bc7dfb-5005-4c09-9933-2082c2451076\") " pod="metallb-system/metallb-operator-controller-manager-bcd7fd588-487fc" Jan 26 17:10:04 crc kubenswrapper[4865]: I0126 17:10:04.568854 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v6f8j\" (UniqueName: \"kubernetes.io/projected/97bc7dfb-5005-4c09-9933-2082c2451076-kube-api-access-v6f8j\") pod \"metallb-operator-controller-manager-bcd7fd588-487fc\" (UID: \"97bc7dfb-5005-4c09-9933-2082c2451076\") " pod="metallb-system/metallb-operator-controller-manager-bcd7fd588-487fc" Jan 26 17:10:04 crc kubenswrapper[4865]: I0126 17:10:04.597663 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/97bc7dfb-5005-4c09-9933-2082c2451076-webhook-cert\") pod \"metallb-operator-controller-manager-bcd7fd588-487fc\" (UID: \"97bc7dfb-5005-4c09-9933-2082c2451076\") " pod="metallb-system/metallb-operator-controller-manager-bcd7fd588-487fc" Jan 26 17:10:04 crc kubenswrapper[4865]: I0126 17:10:04.639256 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/97bc7dfb-5005-4c09-9933-2082c2451076-apiservice-cert\") pod \"metallb-operator-controller-manager-bcd7fd588-487fc\" (UID: \"97bc7dfb-5005-4c09-9933-2082c2451076\") " pod="metallb-system/metallb-operator-controller-manager-bcd7fd588-487fc" Jan 26 17:10:04 crc kubenswrapper[4865]: I0126 17:10:04.730942 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v6f8j\" (UniqueName: \"kubernetes.io/projected/97bc7dfb-5005-4c09-9933-2082c2451076-kube-api-access-v6f8j\") pod \"metallb-operator-controller-manager-bcd7fd588-487fc\" (UID: \"97bc7dfb-5005-4c09-9933-2082c2451076\") " pod="metallb-system/metallb-operator-controller-manager-bcd7fd588-487fc" Jan 26 17:10:04 crc kubenswrapper[4865]: I0126 17:10:04.960105 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-bcd7fd588-487fc" Jan 26 17:10:05 crc kubenswrapper[4865]: I0126 17:10:05.127882 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-74c965d86f-87jrj"] Jan 26 17:10:05 crc kubenswrapper[4865]: I0126 17:10:05.128618 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-74c965d86f-87jrj" Jan 26 17:10:05 crc kubenswrapper[4865]: I0126 17:10:05.128788 4865 generic.go:334] "Generic (PLEG): container finished" podID="329d43a5-6dba-4b29-833d-affb7c051a82" containerID="61d6909e722e1851753a65696ed3641b791695cc3e8ea448a48bbf2ead30262f" exitCode=0 Jan 26 17:10:05 crc kubenswrapper[4865]: I0126 17:10:05.128835 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mjrzc" event={"ID":"329d43a5-6dba-4b29-833d-affb7c051a82","Type":"ContainerDied","Data":"61d6909e722e1851753a65696ed3641b791695cc3e8ea448a48bbf2ead30262f"} Jan 26 17:10:05 crc kubenswrapper[4865]: I0126 17:10:05.128857 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mjrzc" event={"ID":"329d43a5-6dba-4b29-833d-affb7c051a82","Type":"ContainerStarted","Data":"0193671665f9b9fbfdffa884f06ba69b9b8064a31e07b74e1df2e94f0f80544a"} Jan 26 17:10:05 crc kubenswrapper[4865]: I0126 17:10:05.135047 4865 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Jan 26 17:10:05 crc kubenswrapper[4865]: I0126 17:10:05.137479 4865 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Jan 26 17:10:05 crc kubenswrapper[4865]: I0126 17:10:05.137485 4865 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-b62pr" Jan 26 17:10:05 crc kubenswrapper[4865]: I0126 17:10:05.217624 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-74c965d86f-87jrj"] Jan 26 17:10:05 crc kubenswrapper[4865]: I0126 17:10:05.315442 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fvx4f\" (UniqueName: \"kubernetes.io/projected/90825b12-d065-4f87-b04c-835122364369-kube-api-access-fvx4f\") pod \"metallb-operator-webhook-server-74c965d86f-87jrj\" (UID: \"90825b12-d065-4f87-b04c-835122364369\") " pod="metallb-system/metallb-operator-webhook-server-74c965d86f-87jrj" Jan 26 17:10:05 crc kubenswrapper[4865]: I0126 17:10:05.315528 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/90825b12-d065-4f87-b04c-835122364369-webhook-cert\") pod \"metallb-operator-webhook-server-74c965d86f-87jrj\" (UID: \"90825b12-d065-4f87-b04c-835122364369\") " pod="metallb-system/metallb-operator-webhook-server-74c965d86f-87jrj" Jan 26 17:10:05 crc kubenswrapper[4865]: I0126 17:10:05.315549 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/90825b12-d065-4f87-b04c-835122364369-apiservice-cert\") pod \"metallb-operator-webhook-server-74c965d86f-87jrj\" (UID: \"90825b12-d065-4f87-b04c-835122364369\") " pod="metallb-system/metallb-operator-webhook-server-74c965d86f-87jrj" Jan 26 17:10:05 crc kubenswrapper[4865]: I0126 17:10:05.385340 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-bcd7fd588-487fc"] Jan 26 17:10:05 crc kubenswrapper[4865]: W0126 17:10:05.396500 4865 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod97bc7dfb_5005_4c09_9933_2082c2451076.slice/crio-3616aef37acb18cc9b8f8d19639c9c8c2b4a458de357dc8fd604d2553efe574f WatchSource:0}: Error finding container 3616aef37acb18cc9b8f8d19639c9c8c2b4a458de357dc8fd604d2553efe574f: Status 404 returned error can't find the container with id 3616aef37acb18cc9b8f8d19639c9c8c2b4a458de357dc8fd604d2553efe574f Jan 26 17:10:05 crc kubenswrapper[4865]: I0126 17:10:05.417451 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/90825b12-d065-4f87-b04c-835122364369-webhook-cert\") pod \"metallb-operator-webhook-server-74c965d86f-87jrj\" (UID: \"90825b12-d065-4f87-b04c-835122364369\") " pod="metallb-system/metallb-operator-webhook-server-74c965d86f-87jrj" Jan 26 17:10:05 crc kubenswrapper[4865]: I0126 17:10:05.417652 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/90825b12-d065-4f87-b04c-835122364369-apiservice-cert\") pod \"metallb-operator-webhook-server-74c965d86f-87jrj\" (UID: \"90825b12-d065-4f87-b04c-835122364369\") " pod="metallb-system/metallb-operator-webhook-server-74c965d86f-87jrj" Jan 26 17:10:05 crc kubenswrapper[4865]: I0126 17:10:05.417764 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fvx4f\" (UniqueName: \"kubernetes.io/projected/90825b12-d065-4f87-b04c-835122364369-kube-api-access-fvx4f\") pod \"metallb-operator-webhook-server-74c965d86f-87jrj\" (UID: \"90825b12-d065-4f87-b04c-835122364369\") " pod="metallb-system/metallb-operator-webhook-server-74c965d86f-87jrj" Jan 26 17:10:05 crc kubenswrapper[4865]: I0126 17:10:05.425863 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/90825b12-d065-4f87-b04c-835122364369-webhook-cert\") pod \"metallb-operator-webhook-server-74c965d86f-87jrj\" (UID: \"90825b12-d065-4f87-b04c-835122364369\") " pod="metallb-system/metallb-operator-webhook-server-74c965d86f-87jrj" Jan 26 17:10:05 crc kubenswrapper[4865]: I0126 17:10:05.429189 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/90825b12-d065-4f87-b04c-835122364369-apiservice-cert\") pod \"metallb-operator-webhook-server-74c965d86f-87jrj\" (UID: \"90825b12-d065-4f87-b04c-835122364369\") " pod="metallb-system/metallb-operator-webhook-server-74c965d86f-87jrj" Jan 26 17:10:05 crc kubenswrapper[4865]: I0126 17:10:05.436293 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fvx4f\" (UniqueName: \"kubernetes.io/projected/90825b12-d065-4f87-b04c-835122364369-kube-api-access-fvx4f\") pod \"metallb-operator-webhook-server-74c965d86f-87jrj\" (UID: \"90825b12-d065-4f87-b04c-835122364369\") " pod="metallb-system/metallb-operator-webhook-server-74c965d86f-87jrj" Jan 26 17:10:05 crc kubenswrapper[4865]: I0126 17:10:05.520685 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-74c965d86f-87jrj" Jan 26 17:10:05 crc kubenswrapper[4865]: I0126 17:10:05.996092 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-74c965d86f-87jrj"] Jan 26 17:10:06 crc kubenswrapper[4865]: I0126 17:10:06.136436 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-bcd7fd588-487fc" event={"ID":"97bc7dfb-5005-4c09-9933-2082c2451076","Type":"ContainerStarted","Data":"3616aef37acb18cc9b8f8d19639c9c8c2b4a458de357dc8fd604d2553efe574f"} Jan 26 17:10:06 crc kubenswrapper[4865]: I0126 17:10:06.138536 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-74c965d86f-87jrj" event={"ID":"90825b12-d065-4f87-b04c-835122364369","Type":"ContainerStarted","Data":"377097d8f7c87fef886fc212f5ad22ffbc17d8a66ba31192ed8bf61a8fa68175"} Jan 26 17:10:06 crc kubenswrapper[4865]: I0126 17:10:06.141022 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mjrzc" event={"ID":"329d43a5-6dba-4b29-833d-affb7c051a82","Type":"ContainerStarted","Data":"2e52765a478f241f63cab106ddaa82fe450394a19f50e6f743e2170a151b63e7"} Jan 26 17:10:07 crc kubenswrapper[4865]: I0126 17:10:07.156826 4865 generic.go:334] "Generic (PLEG): container finished" podID="329d43a5-6dba-4b29-833d-affb7c051a82" containerID="2e52765a478f241f63cab106ddaa82fe450394a19f50e6f743e2170a151b63e7" exitCode=0 Jan 26 17:10:07 crc kubenswrapper[4865]: I0126 17:10:07.157131 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mjrzc" event={"ID":"329d43a5-6dba-4b29-833d-affb7c051a82","Type":"ContainerDied","Data":"2e52765a478f241f63cab106ddaa82fe450394a19f50e6f743e2170a151b63e7"} Jan 26 17:10:09 crc kubenswrapper[4865]: I0126 17:10:09.211117 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mjrzc" event={"ID":"329d43a5-6dba-4b29-833d-affb7c051a82","Type":"ContainerStarted","Data":"7985242cc432b88b43ec0f60aa8f14dfaa9caee1f263995e563ce4ae96431257"} Jan 26 17:10:09 crc kubenswrapper[4865]: I0126 17:10:09.233679 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-mjrzc" podStartSLOduration=3.06757832 podStartE2EDuration="6.233657094s" podCreationTimestamp="2026-01-26 17:10:03 +0000 UTC" firstStartedPulling="2026-01-26 17:10:05.133505018 +0000 UTC m=+932.717390605" lastFinishedPulling="2026-01-26 17:10:08.299583792 +0000 UTC m=+935.883469379" observedRunningTime="2026-01-26 17:10:09.232698616 +0000 UTC m=+936.816584213" watchObservedRunningTime="2026-01-26 17:10:09.233657094 +0000 UTC m=+936.817542681" Jan 26 17:10:09 crc kubenswrapper[4865]: I0126 17:10:09.291661 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-9248m"] Jan 26 17:10:09 crc kubenswrapper[4865]: I0126 17:10:09.292747 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9248m" Jan 26 17:10:09 crc kubenswrapper[4865]: I0126 17:10:09.311959 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9248m"] Jan 26 17:10:09 crc kubenswrapper[4865]: I0126 17:10:09.448683 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd90d685-ea36-4bae-9f1a-eac3888bada8-catalog-content\") pod \"community-operators-9248m\" (UID: \"bd90d685-ea36-4bae-9f1a-eac3888bada8\") " pod="openshift-marketplace/community-operators-9248m" Jan 26 17:10:09 crc kubenswrapper[4865]: I0126 17:10:09.448818 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd90d685-ea36-4bae-9f1a-eac3888bada8-utilities\") pod \"community-operators-9248m\" (UID: \"bd90d685-ea36-4bae-9f1a-eac3888bada8\") " pod="openshift-marketplace/community-operators-9248m" Jan 26 17:10:09 crc kubenswrapper[4865]: I0126 17:10:09.448948 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4p965\" (UniqueName: \"kubernetes.io/projected/bd90d685-ea36-4bae-9f1a-eac3888bada8-kube-api-access-4p965\") pod \"community-operators-9248m\" (UID: \"bd90d685-ea36-4bae-9f1a-eac3888bada8\") " pod="openshift-marketplace/community-operators-9248m" Jan 26 17:10:09 crc kubenswrapper[4865]: I0126 17:10:09.551070 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd90d685-ea36-4bae-9f1a-eac3888bada8-catalog-content\") pod \"community-operators-9248m\" (UID: \"bd90d685-ea36-4bae-9f1a-eac3888bada8\") " pod="openshift-marketplace/community-operators-9248m" Jan 26 17:10:09 crc kubenswrapper[4865]: I0126 17:10:09.551197 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd90d685-ea36-4bae-9f1a-eac3888bada8-utilities\") pod \"community-operators-9248m\" (UID: \"bd90d685-ea36-4bae-9f1a-eac3888bada8\") " pod="openshift-marketplace/community-operators-9248m" Jan 26 17:10:09 crc kubenswrapper[4865]: I0126 17:10:09.551236 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4p965\" (UniqueName: \"kubernetes.io/projected/bd90d685-ea36-4bae-9f1a-eac3888bada8-kube-api-access-4p965\") pod \"community-operators-9248m\" (UID: \"bd90d685-ea36-4bae-9f1a-eac3888bada8\") " pod="openshift-marketplace/community-operators-9248m" Jan 26 17:10:09 crc kubenswrapper[4865]: I0126 17:10:09.551731 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd90d685-ea36-4bae-9f1a-eac3888bada8-catalog-content\") pod \"community-operators-9248m\" (UID: \"bd90d685-ea36-4bae-9f1a-eac3888bada8\") " pod="openshift-marketplace/community-operators-9248m" Jan 26 17:10:09 crc kubenswrapper[4865]: I0126 17:10:09.551760 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd90d685-ea36-4bae-9f1a-eac3888bada8-utilities\") pod \"community-operators-9248m\" (UID: \"bd90d685-ea36-4bae-9f1a-eac3888bada8\") " pod="openshift-marketplace/community-operators-9248m" Jan 26 17:10:09 crc kubenswrapper[4865]: I0126 17:10:09.589084 4865 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-4p965\" (UniqueName: \"kubernetes.io/projected/bd90d685-ea36-4bae-9f1a-eac3888bada8-kube-api-access-4p965\") pod \"community-operators-9248m\" (UID: \"bd90d685-ea36-4bae-9f1a-eac3888bada8\") " pod="openshift-marketplace/community-operators-9248m" Jan 26 17:10:09 crc kubenswrapper[4865]: I0126 17:10:09.625789 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9248m" Jan 26 17:10:13 crc kubenswrapper[4865]: I0126 17:10:13.417483 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-mjrzc" Jan 26 17:10:13 crc kubenswrapper[4865]: I0126 17:10:13.418192 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-mjrzc" Jan 26 17:10:13 crc kubenswrapper[4865]: I0126 17:10:13.489297 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-mjrzc" Jan 26 17:10:14 crc kubenswrapper[4865]: I0126 17:10:14.366441 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-mjrzc" Jan 26 17:10:15 crc kubenswrapper[4865]: I0126 17:10:15.884442 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mjrzc"] Jan 26 17:10:16 crc kubenswrapper[4865]: I0126 17:10:16.262102 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-bcd7fd588-487fc" event={"ID":"97bc7dfb-5005-4c09-9933-2082c2451076","Type":"ContainerStarted","Data":"025db8140d3eafed53e602cba8898dbd5d793df28c39692672a11d7799dc2f08"} Jan 26 17:10:16 crc kubenswrapper[4865]: I0126 17:10:16.262504 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-bcd7fd588-487fc" Jan 26 17:10:16 crc kubenswrapper[4865]: I0126 17:10:16.268446 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-mjrzc" podUID="329d43a5-6dba-4b29-833d-affb7c051a82" containerName="registry-server" containerID="cri-o://7985242cc432b88b43ec0f60aa8f14dfaa9caee1f263995e563ce4ae96431257" gracePeriod=2 Jan 26 17:10:16 crc kubenswrapper[4865]: I0126 17:10:16.268941 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-74c965d86f-87jrj" event={"ID":"90825b12-d065-4f87-b04c-835122364369","Type":"ContainerStarted","Data":"abdedeba17158a07ce8f84f8541ba11a4bd10c92818ae2ba40362ab65b6dbac2"} Jan 26 17:10:16 crc kubenswrapper[4865]: I0126 17:10:16.268971 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-74c965d86f-87jrj" Jan 26 17:10:16 crc kubenswrapper[4865]: I0126 17:10:16.287499 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-bcd7fd588-487fc" podStartSLOduration=1.832457169 podStartE2EDuration="12.287479668s" podCreationTimestamp="2026-01-26 17:10:04 +0000 UTC" firstStartedPulling="2026-01-26 17:10:05.400887126 +0000 UTC m=+932.984772873" lastFinishedPulling="2026-01-26 17:10:15.855909785 +0000 UTC m=+943.439795372" observedRunningTime="2026-01-26 17:10:16.284332869 +0000 UTC m=+943.868218466" watchObservedRunningTime="2026-01-26 17:10:16.287479668 +0000 UTC m=+943.871365255" Jan 26 
17:10:16 crc kubenswrapper[4865]: I0126 17:10:16.335174 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-74c965d86f-87jrj" podStartSLOduration=1.487474301 podStartE2EDuration="11.335155933s" podCreationTimestamp="2026-01-26 17:10:05 +0000 UTC" firstStartedPulling="2026-01-26 17:10:06.02289275 +0000 UTC m=+933.606778347" lastFinishedPulling="2026-01-26 17:10:15.870574392 +0000 UTC m=+943.454459979" observedRunningTime="2026-01-26 17:10:16.329659677 +0000 UTC m=+943.913545264" watchObservedRunningTime="2026-01-26 17:10:16.335155933 +0000 UTC m=+943.919041520" Jan 26 17:10:16 crc kubenswrapper[4865]: I0126 17:10:16.482633 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9248m"] Jan 26 17:10:16 crc kubenswrapper[4865]: W0126 17:10:16.491850 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbd90d685_ea36_4bae_9f1a_eac3888bada8.slice/crio-dfbaeef708a9d0b48c6a3608c7d092b07d59f0e7a572cc160922827c0cf8b4a5 WatchSource:0}: Error finding container dfbaeef708a9d0b48c6a3608c7d092b07d59f0e7a572cc160922827c0cf8b4a5: Status 404 returned error can't find the container with id dfbaeef708a9d0b48c6a3608c7d092b07d59f0e7a572cc160922827c0cf8b4a5 Jan 26 17:10:16 crc kubenswrapper[4865]: I0126 17:10:16.694617 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mjrzc" Jan 26 17:10:16 crc kubenswrapper[4865]: I0126 17:10:16.727043 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/329d43a5-6dba-4b29-833d-affb7c051a82-utilities\") pod \"329d43a5-6dba-4b29-833d-affb7c051a82\" (UID: \"329d43a5-6dba-4b29-833d-affb7c051a82\") " Jan 26 17:10:16 crc kubenswrapper[4865]: I0126 17:10:16.727176 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/329d43a5-6dba-4b29-833d-affb7c051a82-catalog-content\") pod \"329d43a5-6dba-4b29-833d-affb7c051a82\" (UID: \"329d43a5-6dba-4b29-833d-affb7c051a82\") " Jan 26 17:10:16 crc kubenswrapper[4865]: I0126 17:10:16.727245 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9fdqx\" (UniqueName: \"kubernetes.io/projected/329d43a5-6dba-4b29-833d-affb7c051a82-kube-api-access-9fdqx\") pod \"329d43a5-6dba-4b29-833d-affb7c051a82\" (UID: \"329d43a5-6dba-4b29-833d-affb7c051a82\") " Jan 26 17:10:16 crc kubenswrapper[4865]: I0126 17:10:16.728710 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/329d43a5-6dba-4b29-833d-affb7c051a82-utilities" (OuterVolumeSpecName: "utilities") pod "329d43a5-6dba-4b29-833d-affb7c051a82" (UID: "329d43a5-6dba-4b29-833d-affb7c051a82"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 17:10:16 crc kubenswrapper[4865]: I0126 17:10:16.734178 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/329d43a5-6dba-4b29-833d-affb7c051a82-kube-api-access-9fdqx" (OuterVolumeSpecName: "kube-api-access-9fdqx") pod "329d43a5-6dba-4b29-833d-affb7c051a82" (UID: "329d43a5-6dba-4b29-833d-affb7c051a82"). InnerVolumeSpecName "kube-api-access-9fdqx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 17:10:16 crc kubenswrapper[4865]: I0126 17:10:16.781045 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/329d43a5-6dba-4b29-833d-affb7c051a82-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "329d43a5-6dba-4b29-833d-affb7c051a82" (UID: "329d43a5-6dba-4b29-833d-affb7c051a82"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 17:10:16 crc kubenswrapper[4865]: I0126 17:10:16.828931 4865 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/329d43a5-6dba-4b29-833d-affb7c051a82-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 17:10:16 crc kubenswrapper[4865]: I0126 17:10:16.829019 4865 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/329d43a5-6dba-4b29-833d-affb7c051a82-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 17:10:16 crc kubenswrapper[4865]: I0126 17:10:16.829038 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9fdqx\" (UniqueName: \"kubernetes.io/projected/329d43a5-6dba-4b29-833d-affb7c051a82-kube-api-access-9fdqx\") on node \"crc\" DevicePath \"\"" Jan 26 17:10:17 crc kubenswrapper[4865]: I0126 17:10:17.274707 4865 generic.go:334] "Generic (PLEG): container finished" podID="bd90d685-ea36-4bae-9f1a-eac3888bada8" containerID="1d9e6a1bc782cb7a8900c29349581f7e67681b15433a450968b8678517fdcb77" exitCode=0 Jan 26 17:10:17 crc kubenswrapper[4865]: I0126 17:10:17.274752 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9248m" event={"ID":"bd90d685-ea36-4bae-9f1a-eac3888bada8","Type":"ContainerDied","Data":"1d9e6a1bc782cb7a8900c29349581f7e67681b15433a450968b8678517fdcb77"} Jan 26 17:10:17 crc kubenswrapper[4865]: I0126 17:10:17.274784 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9248m" event={"ID":"bd90d685-ea36-4bae-9f1a-eac3888bada8","Type":"ContainerStarted","Data":"dfbaeef708a9d0b48c6a3608c7d092b07d59f0e7a572cc160922827c0cf8b4a5"} Jan 26 17:10:17 crc kubenswrapper[4865]: I0126 17:10:17.276964 4865 generic.go:334] "Generic (PLEG): container finished" podID="329d43a5-6dba-4b29-833d-affb7c051a82" containerID="7985242cc432b88b43ec0f60aa8f14dfaa9caee1f263995e563ce4ae96431257" exitCode=0 Jan 26 17:10:17 crc kubenswrapper[4865]: I0126 17:10:17.277028 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mjrzc" event={"ID":"329d43a5-6dba-4b29-833d-affb7c051a82","Type":"ContainerDied","Data":"7985242cc432b88b43ec0f60aa8f14dfaa9caee1f263995e563ce4ae96431257"} Jan 26 17:10:17 crc kubenswrapper[4865]: I0126 17:10:17.277099 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mjrzc" event={"ID":"329d43a5-6dba-4b29-833d-affb7c051a82","Type":"ContainerDied","Data":"0193671665f9b9fbfdffa884f06ba69b9b8064a31e07b74e1df2e94f0f80544a"} Jan 26 17:10:17 crc kubenswrapper[4865]: I0126 17:10:17.277117 4865 scope.go:117] "RemoveContainer" containerID="7985242cc432b88b43ec0f60aa8f14dfaa9caee1f263995e563ce4ae96431257" Jan 26 17:10:17 crc kubenswrapper[4865]: I0126 17:10:17.277056 4865 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mjrzc" Jan 26 17:10:17 crc kubenswrapper[4865]: I0126 17:10:17.297180 4865 scope.go:117] "RemoveContainer" containerID="2e52765a478f241f63cab106ddaa82fe450394a19f50e6f743e2170a151b63e7" Jan 26 17:10:17 crc kubenswrapper[4865]: I0126 17:10:17.316202 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mjrzc"] Jan 26 17:10:17 crc kubenswrapper[4865]: I0126 17:10:17.318960 4865 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-mjrzc"] Jan 26 17:10:17 crc kubenswrapper[4865]: I0126 17:10:17.334084 4865 scope.go:117] "RemoveContainer" containerID="61d6909e722e1851753a65696ed3641b791695cc3e8ea448a48bbf2ead30262f" Jan 26 17:10:17 crc kubenswrapper[4865]: I0126 17:10:17.350031 4865 scope.go:117] "RemoveContainer" containerID="7985242cc432b88b43ec0f60aa8f14dfaa9caee1f263995e563ce4ae96431257" Jan 26 17:10:17 crc kubenswrapper[4865]: E0126 17:10:17.358938 4865 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7985242cc432b88b43ec0f60aa8f14dfaa9caee1f263995e563ce4ae96431257\": container with ID starting with 7985242cc432b88b43ec0f60aa8f14dfaa9caee1f263995e563ce4ae96431257 not found: ID does not exist" containerID="7985242cc432b88b43ec0f60aa8f14dfaa9caee1f263995e563ce4ae96431257" Jan 26 17:10:17 crc kubenswrapper[4865]: I0126 17:10:17.359010 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7985242cc432b88b43ec0f60aa8f14dfaa9caee1f263995e563ce4ae96431257"} err="failed to get container status \"7985242cc432b88b43ec0f60aa8f14dfaa9caee1f263995e563ce4ae96431257\": rpc error: code = NotFound desc = could not find container \"7985242cc432b88b43ec0f60aa8f14dfaa9caee1f263995e563ce4ae96431257\": container with ID starting with 7985242cc432b88b43ec0f60aa8f14dfaa9caee1f263995e563ce4ae96431257 not found: ID does not exist" Jan 26 17:10:17 crc kubenswrapper[4865]: I0126 17:10:17.359051 4865 scope.go:117] "RemoveContainer" containerID="2e52765a478f241f63cab106ddaa82fe450394a19f50e6f743e2170a151b63e7" Jan 26 17:10:17 crc kubenswrapper[4865]: E0126 17:10:17.359535 4865 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e52765a478f241f63cab106ddaa82fe450394a19f50e6f743e2170a151b63e7\": container with ID starting with 2e52765a478f241f63cab106ddaa82fe450394a19f50e6f743e2170a151b63e7 not found: ID does not exist" containerID="2e52765a478f241f63cab106ddaa82fe450394a19f50e6f743e2170a151b63e7" Jan 26 17:10:17 crc kubenswrapper[4865]: I0126 17:10:17.359577 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e52765a478f241f63cab106ddaa82fe450394a19f50e6f743e2170a151b63e7"} err="failed to get container status \"2e52765a478f241f63cab106ddaa82fe450394a19f50e6f743e2170a151b63e7\": rpc error: code = NotFound desc = could not find container \"2e52765a478f241f63cab106ddaa82fe450394a19f50e6f743e2170a151b63e7\": container with ID starting with 2e52765a478f241f63cab106ddaa82fe450394a19f50e6f743e2170a151b63e7 not found: ID does not exist" Jan 26 17:10:17 crc kubenswrapper[4865]: I0126 17:10:17.359601 4865 scope.go:117] "RemoveContainer" containerID="61d6909e722e1851753a65696ed3641b791695cc3e8ea448a48bbf2ead30262f" Jan 26 17:10:17 crc kubenswrapper[4865]: E0126 17:10:17.359905 4865 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"61d6909e722e1851753a65696ed3641b791695cc3e8ea448a48bbf2ead30262f\": container with ID starting with 61d6909e722e1851753a65696ed3641b791695cc3e8ea448a48bbf2ead30262f not found: ID does not exist" containerID="61d6909e722e1851753a65696ed3641b791695cc3e8ea448a48bbf2ead30262f" Jan 26 17:10:17 crc kubenswrapper[4865]: I0126 17:10:17.359944 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"61d6909e722e1851753a65696ed3641b791695cc3e8ea448a48bbf2ead30262f"} err="failed to get container status \"61d6909e722e1851753a65696ed3641b791695cc3e8ea448a48bbf2ead30262f\": rpc error: code = NotFound desc = could not find container \"61d6909e722e1851753a65696ed3641b791695cc3e8ea448a48bbf2ead30262f\": container with ID starting with 61d6909e722e1851753a65696ed3641b791695cc3e8ea448a48bbf2ead30262f not found: ID does not exist" Jan 26 17:10:18 crc kubenswrapper[4865]: I0126 17:10:18.285156 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9248m" event={"ID":"bd90d685-ea36-4bae-9f1a-eac3888bada8","Type":"ContainerStarted","Data":"19c7f59731e58db4684605c1b204c1ccf4d748292dfe3a10a800e068e8c05824"} Jan 26 17:10:18 crc kubenswrapper[4865]: I0126 17:10:18.366545 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="329d43a5-6dba-4b29-833d-affb7c051a82" path="/var/lib/kubelet/pods/329d43a5-6dba-4b29-833d-affb7c051a82/volumes" Jan 26 17:10:19 crc kubenswrapper[4865]: I0126 17:10:19.294940 4865 generic.go:334] "Generic (PLEG): container finished" podID="bd90d685-ea36-4bae-9f1a-eac3888bada8" containerID="19c7f59731e58db4684605c1b204c1ccf4d748292dfe3a10a800e068e8c05824" exitCode=0 Jan 26 17:10:19 crc kubenswrapper[4865]: I0126 17:10:19.295180 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9248m" event={"ID":"bd90d685-ea36-4bae-9f1a-eac3888bada8","Type":"ContainerDied","Data":"19c7f59731e58db4684605c1b204c1ccf4d748292dfe3a10a800e068e8c05824"} Jan 26 17:10:20 crc kubenswrapper[4865]: I0126 17:10:20.305246 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9248m" event={"ID":"bd90d685-ea36-4bae-9f1a-eac3888bada8","Type":"ContainerStarted","Data":"71a2021212b2d74afaa33fff7cc8cab58a804df613ac75e02409ace6a4f0d621"} Jan 26 17:10:20 crc kubenswrapper[4865]: I0126 17:10:20.331139 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-9248m" podStartSLOduration=8.913450620999999 podStartE2EDuration="11.331106678s" podCreationTimestamp="2026-01-26 17:10:09 +0000 UTC" firstStartedPulling="2026-01-26 17:10:17.27645036 +0000 UTC m=+944.860335937" lastFinishedPulling="2026-01-26 17:10:19.694106407 +0000 UTC m=+947.277991994" observedRunningTime="2026-01-26 17:10:20.329298707 +0000 UTC m=+947.913184314" watchObservedRunningTime="2026-01-26 17:10:20.331106678 +0000 UTC m=+947.914992255" Jan 26 17:10:22 crc kubenswrapper[4865]: I0126 17:10:22.288548 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-7sdlc"] Jan 26 17:10:22 crc kubenswrapper[4865]: E0126 17:10:22.290030 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="329d43a5-6dba-4b29-833d-affb7c051a82" containerName="extract-utilities" Jan 26 17:10:22 crc kubenswrapper[4865]: I0126 17:10:22.290144 4865 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="329d43a5-6dba-4b29-833d-affb7c051a82" containerName="extract-utilities" Jan 26 17:10:22 crc kubenswrapper[4865]: E0126 17:10:22.290243 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="329d43a5-6dba-4b29-833d-affb7c051a82" containerName="extract-content" Jan 26 17:10:22 crc kubenswrapper[4865]: I0126 17:10:22.290324 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="329d43a5-6dba-4b29-833d-affb7c051a82" containerName="extract-content" Jan 26 17:10:22 crc kubenswrapper[4865]: E0126 17:10:22.290406 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="329d43a5-6dba-4b29-833d-affb7c051a82" containerName="registry-server" Jan 26 17:10:22 crc kubenswrapper[4865]: I0126 17:10:22.290479 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="329d43a5-6dba-4b29-833d-affb7c051a82" containerName="registry-server" Jan 26 17:10:22 crc kubenswrapper[4865]: I0126 17:10:22.290670 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="329d43a5-6dba-4b29-833d-affb7c051a82" containerName="registry-server" Jan 26 17:10:22 crc kubenswrapper[4865]: I0126 17:10:22.291812 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7sdlc" Jan 26 17:10:22 crc kubenswrapper[4865]: I0126 17:10:22.304611 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7sdlc"] Jan 26 17:10:22 crc kubenswrapper[4865]: I0126 17:10:22.402077 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/390e456d-a8db-4d14-9b3c-66ab2535484d-utilities\") pod \"redhat-marketplace-7sdlc\" (UID: \"390e456d-a8db-4d14-9b3c-66ab2535484d\") " pod="openshift-marketplace/redhat-marketplace-7sdlc" Jan 26 17:10:22 crc kubenswrapper[4865]: I0126 17:10:22.402631 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/390e456d-a8db-4d14-9b3c-66ab2535484d-catalog-content\") pod \"redhat-marketplace-7sdlc\" (UID: \"390e456d-a8db-4d14-9b3c-66ab2535484d\") " pod="openshift-marketplace/redhat-marketplace-7sdlc" Jan 26 17:10:22 crc kubenswrapper[4865]: I0126 17:10:22.402788 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nr62n\" (UniqueName: \"kubernetes.io/projected/390e456d-a8db-4d14-9b3c-66ab2535484d-kube-api-access-nr62n\") pod \"redhat-marketplace-7sdlc\" (UID: \"390e456d-a8db-4d14-9b3c-66ab2535484d\") " pod="openshift-marketplace/redhat-marketplace-7sdlc" Jan 26 17:10:22 crc kubenswrapper[4865]: I0126 17:10:22.504597 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nr62n\" (UniqueName: \"kubernetes.io/projected/390e456d-a8db-4d14-9b3c-66ab2535484d-kube-api-access-nr62n\") pod \"redhat-marketplace-7sdlc\" (UID: \"390e456d-a8db-4d14-9b3c-66ab2535484d\") " pod="openshift-marketplace/redhat-marketplace-7sdlc" Jan 26 17:10:22 crc kubenswrapper[4865]: I0126 17:10:22.504731 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/390e456d-a8db-4d14-9b3c-66ab2535484d-utilities\") pod \"redhat-marketplace-7sdlc\" (UID: \"390e456d-a8db-4d14-9b3c-66ab2535484d\") " pod="openshift-marketplace/redhat-marketplace-7sdlc" Jan 26 17:10:22 crc kubenswrapper[4865]: I0126 17:10:22.504843 4865 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/390e456d-a8db-4d14-9b3c-66ab2535484d-catalog-content\") pod \"redhat-marketplace-7sdlc\" (UID: \"390e456d-a8db-4d14-9b3c-66ab2535484d\") " pod="openshift-marketplace/redhat-marketplace-7sdlc" Jan 26 17:10:22 crc kubenswrapper[4865]: I0126 17:10:22.505282 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/390e456d-a8db-4d14-9b3c-66ab2535484d-utilities\") pod \"redhat-marketplace-7sdlc\" (UID: \"390e456d-a8db-4d14-9b3c-66ab2535484d\") " pod="openshift-marketplace/redhat-marketplace-7sdlc" Jan 26 17:10:22 crc kubenswrapper[4865]: I0126 17:10:22.505300 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/390e456d-a8db-4d14-9b3c-66ab2535484d-catalog-content\") pod \"redhat-marketplace-7sdlc\" (UID: \"390e456d-a8db-4d14-9b3c-66ab2535484d\") " pod="openshift-marketplace/redhat-marketplace-7sdlc" Jan 26 17:10:22 crc kubenswrapper[4865]: I0126 17:10:22.529210 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nr62n\" (UniqueName: \"kubernetes.io/projected/390e456d-a8db-4d14-9b3c-66ab2535484d-kube-api-access-nr62n\") pod \"redhat-marketplace-7sdlc\" (UID: \"390e456d-a8db-4d14-9b3c-66ab2535484d\") " pod="openshift-marketplace/redhat-marketplace-7sdlc" Jan 26 17:10:22 crc kubenswrapper[4865]: I0126 17:10:22.616382 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7sdlc" Jan 26 17:10:22 crc kubenswrapper[4865]: I0126 17:10:22.859127 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7sdlc"] Jan 26 17:10:23 crc kubenswrapper[4865]: I0126 17:10:23.326233 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7sdlc" event={"ID":"390e456d-a8db-4d14-9b3c-66ab2535484d","Type":"ContainerStarted","Data":"7afc6d8faa74ff16009f98f1cd2044b4074c7dac56155b7cdfa93f26992dc2e9"} Jan 26 17:10:24 crc kubenswrapper[4865]: I0126 17:10:24.334192 4865 generic.go:334] "Generic (PLEG): container finished" podID="390e456d-a8db-4d14-9b3c-66ab2535484d" containerID="b96f10ebf76b84060a8a5d299b853038f4bd74a3d90bf0b7b6b4e4c9dd5143a5" exitCode=0 Jan 26 17:10:24 crc kubenswrapper[4865]: I0126 17:10:24.334261 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7sdlc" event={"ID":"390e456d-a8db-4d14-9b3c-66ab2535484d","Type":"ContainerDied","Data":"b96f10ebf76b84060a8a5d299b853038f4bd74a3d90bf0b7b6b4e4c9dd5143a5"} Jan 26 17:10:29 crc kubenswrapper[4865]: I0126 17:10:29.626750 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-9248m" Jan 26 17:10:29 crc kubenswrapper[4865]: I0126 17:10:29.627361 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-9248m" Jan 26 17:10:29 crc kubenswrapper[4865]: I0126 17:10:29.687596 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-9248m" Jan 26 17:10:30 crc kubenswrapper[4865]: I0126 17:10:30.387453 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7sdlc" 
event={"ID":"390e456d-a8db-4d14-9b3c-66ab2535484d","Type":"ContainerStarted","Data":"3a3e94875685c89c3fd9ed9e7c80f85910dc122b666e2ea647267b0b8dd95fba"} Jan 26 17:10:30 crc kubenswrapper[4865]: I0126 17:10:30.617104 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-9248m" Jan 26 17:10:31 crc kubenswrapper[4865]: I0126 17:10:31.396129 4865 generic.go:334] "Generic (PLEG): container finished" podID="390e456d-a8db-4d14-9b3c-66ab2535484d" containerID="3a3e94875685c89c3fd9ed9e7c80f85910dc122b666e2ea647267b0b8dd95fba" exitCode=0 Jan 26 17:10:31 crc kubenswrapper[4865]: I0126 17:10:31.396236 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7sdlc" event={"ID":"390e456d-a8db-4d14-9b3c-66ab2535484d","Type":"ContainerDied","Data":"3a3e94875685c89c3fd9ed9e7c80f85910dc122b666e2ea647267b0b8dd95fba"} Jan 26 17:10:32 crc kubenswrapper[4865]: I0126 17:10:32.409158 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7sdlc" event={"ID":"390e456d-a8db-4d14-9b3c-66ab2535484d","Type":"ContainerStarted","Data":"b1cf2c3dfa9f51fdba552727c2f2740df0969a2bf06b3a088926ade29253c2ba"} Jan 26 17:10:32 crc kubenswrapper[4865]: I0126 17:10:32.441188 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-7sdlc" podStartSLOduration=2.951900777 podStartE2EDuration="10.441169274s" podCreationTimestamp="2026-01-26 17:10:22 +0000 UTC" firstStartedPulling="2026-01-26 17:10:24.33928366 +0000 UTC m=+951.923169247" lastFinishedPulling="2026-01-26 17:10:31.828552157 +0000 UTC m=+959.412437744" observedRunningTime="2026-01-26 17:10:32.435100932 +0000 UTC m=+960.018986519" watchObservedRunningTime="2026-01-26 17:10:32.441169274 +0000 UTC m=+960.025054861" Jan 26 17:10:32 crc kubenswrapper[4865]: I0126 17:10:32.617101 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-7sdlc" Jan 26 17:10:32 crc kubenswrapper[4865]: I0126 17:10:32.617167 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-7sdlc" Jan 26 17:10:32 crc kubenswrapper[4865]: I0126 17:10:32.880539 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9248m"] Jan 26 17:10:33 crc kubenswrapper[4865]: I0126 17:10:33.414111 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-9248m" podUID="bd90d685-ea36-4bae-9f1a-eac3888bada8" containerName="registry-server" containerID="cri-o://71a2021212b2d74afaa33fff7cc8cab58a804df613ac75e02409ace6a4f0d621" gracePeriod=2 Jan 26 17:10:33 crc kubenswrapper[4865]: I0126 17:10:33.691608 4865 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-7sdlc" podUID="390e456d-a8db-4d14-9b3c-66ab2535484d" containerName="registry-server" probeResult="failure" output=< Jan 26 17:10:33 crc kubenswrapper[4865]: timeout: failed to connect service ":50051" within 1s Jan 26 17:10:33 crc kubenswrapper[4865]: > Jan 26 17:10:33 crc kubenswrapper[4865]: I0126 17:10:33.778262 4865 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9248m" Jan 26 17:10:33 crc kubenswrapper[4865]: I0126 17:10:33.959364 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd90d685-ea36-4bae-9f1a-eac3888bada8-catalog-content\") pod \"bd90d685-ea36-4bae-9f1a-eac3888bada8\" (UID: \"bd90d685-ea36-4bae-9f1a-eac3888bada8\") " Jan 26 17:10:33 crc kubenswrapper[4865]: I0126 17:10:33.959535 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4p965\" (UniqueName: \"kubernetes.io/projected/bd90d685-ea36-4bae-9f1a-eac3888bada8-kube-api-access-4p965\") pod \"bd90d685-ea36-4bae-9f1a-eac3888bada8\" (UID: \"bd90d685-ea36-4bae-9f1a-eac3888bada8\") " Jan 26 17:10:33 crc kubenswrapper[4865]: I0126 17:10:33.959574 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd90d685-ea36-4bae-9f1a-eac3888bada8-utilities\") pod \"bd90d685-ea36-4bae-9f1a-eac3888bada8\" (UID: \"bd90d685-ea36-4bae-9f1a-eac3888bada8\") " Jan 26 17:10:33 crc kubenswrapper[4865]: I0126 17:10:33.961065 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd90d685-ea36-4bae-9f1a-eac3888bada8-utilities" (OuterVolumeSpecName: "utilities") pod "bd90d685-ea36-4bae-9f1a-eac3888bada8" (UID: "bd90d685-ea36-4bae-9f1a-eac3888bada8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 17:10:34 crc kubenswrapper[4865]: I0126 17:10:34.047564 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd90d685-ea36-4bae-9f1a-eac3888bada8-kube-api-access-4p965" (OuterVolumeSpecName: "kube-api-access-4p965") pod "bd90d685-ea36-4bae-9f1a-eac3888bada8" (UID: "bd90d685-ea36-4bae-9f1a-eac3888bada8"). InnerVolumeSpecName "kube-api-access-4p965". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 17:10:34 crc kubenswrapper[4865]: I0126 17:10:34.061277 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4p965\" (UniqueName: \"kubernetes.io/projected/bd90d685-ea36-4bae-9f1a-eac3888bada8-kube-api-access-4p965\") on node \"crc\" DevicePath \"\"" Jan 26 17:10:34 crc kubenswrapper[4865]: I0126 17:10:34.061312 4865 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bd90d685-ea36-4bae-9f1a-eac3888bada8-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 17:10:34 crc kubenswrapper[4865]: I0126 17:10:34.089564 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bd90d685-ea36-4bae-9f1a-eac3888bada8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bd90d685-ea36-4bae-9f1a-eac3888bada8" (UID: "bd90d685-ea36-4bae-9f1a-eac3888bada8"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 17:10:34 crc kubenswrapper[4865]: I0126 17:10:34.162903 4865 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bd90d685-ea36-4bae-9f1a-eac3888bada8-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 17:10:34 crc kubenswrapper[4865]: I0126 17:10:34.421170 4865 generic.go:334] "Generic (PLEG): container finished" podID="bd90d685-ea36-4bae-9f1a-eac3888bada8" containerID="71a2021212b2d74afaa33fff7cc8cab58a804df613ac75e02409ace6a4f0d621" exitCode=0 Jan 26 17:10:34 crc kubenswrapper[4865]: I0126 17:10:34.421208 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9248m" event={"ID":"bd90d685-ea36-4bae-9f1a-eac3888bada8","Type":"ContainerDied","Data":"71a2021212b2d74afaa33fff7cc8cab58a804df613ac75e02409ace6a4f0d621"} Jan 26 17:10:34 crc kubenswrapper[4865]: I0126 17:10:34.421248 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9248m" Jan 26 17:10:34 crc kubenswrapper[4865]: I0126 17:10:34.421273 4865 scope.go:117] "RemoveContainer" containerID="71a2021212b2d74afaa33fff7cc8cab58a804df613ac75e02409ace6a4f0d621" Jan 26 17:10:34 crc kubenswrapper[4865]: I0126 17:10:34.421259 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9248m" event={"ID":"bd90d685-ea36-4bae-9f1a-eac3888bada8","Type":"ContainerDied","Data":"dfbaeef708a9d0b48c6a3608c7d092b07d59f0e7a572cc160922827c0cf8b4a5"} Jan 26 17:10:34 crc kubenswrapper[4865]: I0126 17:10:34.440275 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9248m"] Jan 26 17:10:34 crc kubenswrapper[4865]: I0126 17:10:34.444506 4865 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-9248m"] Jan 26 17:10:34 crc kubenswrapper[4865]: I0126 17:10:34.445705 4865 scope.go:117] "RemoveContainer" containerID="19c7f59731e58db4684605c1b204c1ccf4d748292dfe3a10a800e068e8c05824" Jan 26 17:10:34 crc kubenswrapper[4865]: I0126 17:10:34.465332 4865 scope.go:117] "RemoveContainer" containerID="1d9e6a1bc782cb7a8900c29349581f7e67681b15433a450968b8678517fdcb77" Jan 26 17:10:34 crc kubenswrapper[4865]: I0126 17:10:34.512073 4865 patch_prober.go:28] interesting pod/machine-config-daemon-q8cb9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 17:10:34 crc kubenswrapper[4865]: I0126 17:10:34.512152 4865 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 17:10:34 crc kubenswrapper[4865]: I0126 17:10:34.527750 4865 scope.go:117] "RemoveContainer" containerID="71a2021212b2d74afaa33fff7cc8cab58a804df613ac75e02409ace6a4f0d621" Jan 26 17:10:34 crc kubenswrapper[4865]: E0126 17:10:34.528466 4865 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71a2021212b2d74afaa33fff7cc8cab58a804df613ac75e02409ace6a4f0d621\": container with ID starting with 
71a2021212b2d74afaa33fff7cc8cab58a804df613ac75e02409ace6a4f0d621 not found: ID does not exist" containerID="71a2021212b2d74afaa33fff7cc8cab58a804df613ac75e02409ace6a4f0d621" Jan 26 17:10:34 crc kubenswrapper[4865]: I0126 17:10:34.528531 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71a2021212b2d74afaa33fff7cc8cab58a804df613ac75e02409ace6a4f0d621"} err="failed to get container status \"71a2021212b2d74afaa33fff7cc8cab58a804df613ac75e02409ace6a4f0d621\": rpc error: code = NotFound desc = could not find container \"71a2021212b2d74afaa33fff7cc8cab58a804df613ac75e02409ace6a4f0d621\": container with ID starting with 71a2021212b2d74afaa33fff7cc8cab58a804df613ac75e02409ace6a4f0d621 not found: ID does not exist" Jan 26 17:10:34 crc kubenswrapper[4865]: I0126 17:10:34.528558 4865 scope.go:117] "RemoveContainer" containerID="19c7f59731e58db4684605c1b204c1ccf4d748292dfe3a10a800e068e8c05824" Jan 26 17:10:34 crc kubenswrapper[4865]: E0126 17:10:34.528921 4865 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19c7f59731e58db4684605c1b204c1ccf4d748292dfe3a10a800e068e8c05824\": container with ID starting with 19c7f59731e58db4684605c1b204c1ccf4d748292dfe3a10a800e068e8c05824 not found: ID does not exist" containerID="19c7f59731e58db4684605c1b204c1ccf4d748292dfe3a10a800e068e8c05824" Jan 26 17:10:34 crc kubenswrapper[4865]: I0126 17:10:34.529023 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19c7f59731e58db4684605c1b204c1ccf4d748292dfe3a10a800e068e8c05824"} err="failed to get container status \"19c7f59731e58db4684605c1b204c1ccf4d748292dfe3a10a800e068e8c05824\": rpc error: code = NotFound desc = could not find container \"19c7f59731e58db4684605c1b204c1ccf4d748292dfe3a10a800e068e8c05824\": container with ID starting with 19c7f59731e58db4684605c1b204c1ccf4d748292dfe3a10a800e068e8c05824 not found: ID does not exist" Jan 26 17:10:34 crc kubenswrapper[4865]: I0126 17:10:34.529066 4865 scope.go:117] "RemoveContainer" containerID="1d9e6a1bc782cb7a8900c29349581f7e67681b15433a450968b8678517fdcb77" Jan 26 17:10:34 crc kubenswrapper[4865]: E0126 17:10:34.529594 4865 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d9e6a1bc782cb7a8900c29349581f7e67681b15433a450968b8678517fdcb77\": container with ID starting with 1d9e6a1bc782cb7a8900c29349581f7e67681b15433a450968b8678517fdcb77 not found: ID does not exist" containerID="1d9e6a1bc782cb7a8900c29349581f7e67681b15433a450968b8678517fdcb77" Jan 26 17:10:34 crc kubenswrapper[4865]: I0126 17:10:34.529655 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d9e6a1bc782cb7a8900c29349581f7e67681b15433a450968b8678517fdcb77"} err="failed to get container status \"1d9e6a1bc782cb7a8900c29349581f7e67681b15433a450968b8678517fdcb77\": rpc error: code = NotFound desc = could not find container \"1d9e6a1bc782cb7a8900c29349581f7e67681b15433a450968b8678517fdcb77\": container with ID starting with 1d9e6a1bc782cb7a8900c29349581f7e67681b15433a450968b8678517fdcb77 not found: ID does not exist" Jan 26 17:10:35 crc kubenswrapper[4865]: I0126 17:10:35.525868 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-74c965d86f-87jrj" Jan 26 17:10:36 crc kubenswrapper[4865]: I0126 17:10:36.365237 4865 kubelet_volumes.go:163] "Cleaned up 
orphaned pod volumes dir" podUID="bd90d685-ea36-4bae-9f1a-eac3888bada8" path="/var/lib/kubelet/pods/bd90d685-ea36-4bae-9f1a-eac3888bada8/volumes" Jan 26 17:10:42 crc kubenswrapper[4865]: I0126 17:10:42.658610 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-7sdlc" Jan 26 17:10:42 crc kubenswrapper[4865]: I0126 17:10:42.701367 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-7sdlc" Jan 26 17:10:44 crc kubenswrapper[4865]: I0126 17:10:44.903927 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7sdlc"] Jan 26 17:10:45 crc kubenswrapper[4865]: I0126 17:10:45.484375 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8vh55"] Jan 26 17:10:45 crc kubenswrapper[4865]: I0126 17:10:45.484747 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-8vh55" podUID="8e7fa29d-427b-4a11-88db-190d64b03f52" containerName="registry-server" containerID="cri-o://3c8bacee891a9286e210100054040e6c21d07cb2065cee678ff0c2bcfc77f43f" gracePeriod=2 Jan 26 17:10:48 crc kubenswrapper[4865]: I0126 17:10:48.522439 4865 generic.go:334] "Generic (PLEG): container finished" podID="8e7fa29d-427b-4a11-88db-190d64b03f52" containerID="3c8bacee891a9286e210100054040e6c21d07cb2065cee678ff0c2bcfc77f43f" exitCode=0 Jan 26 17:10:48 crc kubenswrapper[4865]: I0126 17:10:48.522527 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8vh55" event={"ID":"8e7fa29d-427b-4a11-88db-190d64b03f52","Type":"ContainerDied","Data":"3c8bacee891a9286e210100054040e6c21d07cb2065cee678ff0c2bcfc77f43f"} Jan 26 17:10:48 crc kubenswrapper[4865]: I0126 17:10:48.714561 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8vh55" Jan 26 17:10:48 crc kubenswrapper[4865]: I0126 17:10:48.832571 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e7fa29d-427b-4a11-88db-190d64b03f52-catalog-content\") pod \"8e7fa29d-427b-4a11-88db-190d64b03f52\" (UID: \"8e7fa29d-427b-4a11-88db-190d64b03f52\") " Jan 26 17:10:48 crc kubenswrapper[4865]: I0126 17:10:48.832661 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6wmbc\" (UniqueName: \"kubernetes.io/projected/8e7fa29d-427b-4a11-88db-190d64b03f52-kube-api-access-6wmbc\") pod \"8e7fa29d-427b-4a11-88db-190d64b03f52\" (UID: \"8e7fa29d-427b-4a11-88db-190d64b03f52\") " Jan 26 17:10:48 crc kubenswrapper[4865]: I0126 17:10:48.832789 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e7fa29d-427b-4a11-88db-190d64b03f52-utilities\") pod \"8e7fa29d-427b-4a11-88db-190d64b03f52\" (UID: \"8e7fa29d-427b-4a11-88db-190d64b03f52\") " Jan 26 17:10:48 crc kubenswrapper[4865]: I0126 17:10:48.834057 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8e7fa29d-427b-4a11-88db-190d64b03f52-utilities" (OuterVolumeSpecName: "utilities") pod "8e7fa29d-427b-4a11-88db-190d64b03f52" (UID: "8e7fa29d-427b-4a11-88db-190d64b03f52"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 17:10:48 crc kubenswrapper[4865]: I0126 17:10:48.843554 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e7fa29d-427b-4a11-88db-190d64b03f52-kube-api-access-6wmbc" (OuterVolumeSpecName: "kube-api-access-6wmbc") pod "8e7fa29d-427b-4a11-88db-190d64b03f52" (UID: "8e7fa29d-427b-4a11-88db-190d64b03f52"). InnerVolumeSpecName "kube-api-access-6wmbc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 17:10:48 crc kubenswrapper[4865]: I0126 17:10:48.862907 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8e7fa29d-427b-4a11-88db-190d64b03f52-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8e7fa29d-427b-4a11-88db-190d64b03f52" (UID: "8e7fa29d-427b-4a11-88db-190d64b03f52"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 17:10:48 crc kubenswrapper[4865]: I0126 17:10:48.968339 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6wmbc\" (UniqueName: \"kubernetes.io/projected/8e7fa29d-427b-4a11-88db-190d64b03f52-kube-api-access-6wmbc\") on node \"crc\" DevicePath \"\"" Jan 26 17:10:48 crc kubenswrapper[4865]: I0126 17:10:48.968420 4865 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8e7fa29d-427b-4a11-88db-190d64b03f52-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 17:10:48 crc kubenswrapper[4865]: I0126 17:10:48.968431 4865 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8e7fa29d-427b-4a11-88db-190d64b03f52-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 17:10:49 crc kubenswrapper[4865]: I0126 17:10:49.533613 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8vh55" event={"ID":"8e7fa29d-427b-4a11-88db-190d64b03f52","Type":"ContainerDied","Data":"8a9757b23525510655e453955470d1eeb8044df3b2a5daf7eb466eb1e765c1a7"} Jan 26 17:10:49 crc kubenswrapper[4865]: I0126 17:10:49.533700 4865 scope.go:117] "RemoveContainer" containerID="3c8bacee891a9286e210100054040e6c21d07cb2065cee678ff0c2bcfc77f43f" Jan 26 17:10:49 crc kubenswrapper[4865]: I0126 17:10:49.533706 4865 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8vh55" Jan 26 17:10:49 crc kubenswrapper[4865]: I0126 17:10:49.552096 4865 scope.go:117] "RemoveContainer" containerID="e7a759f74ce8958cb015eb80a4f85a3460da82913aac96aec2481dd6ad214297" Jan 26 17:10:49 crc kubenswrapper[4865]: I0126 17:10:49.572656 4865 scope.go:117] "RemoveContainer" containerID="4b05e4937b3ad36258e87b5d78f3700d05e622deb55d406dd916246548bc98e5" Jan 26 17:10:49 crc kubenswrapper[4865]: I0126 17:10:49.589782 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8vh55"] Jan 26 17:10:49 crc kubenswrapper[4865]: I0126 17:10:49.598382 4865 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-8vh55"] Jan 26 17:10:50 crc kubenswrapper[4865]: I0126 17:10:50.365729 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8e7fa29d-427b-4a11-88db-190d64b03f52" path="/var/lib/kubelet/pods/8e7fa29d-427b-4a11-88db-190d64b03f52/volumes" Jan 26 17:10:54 crc kubenswrapper[4865]: I0126 17:10:54.963604 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-bcd7fd588-487fc" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.724949 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-l47t7"] Jan 26 17:10:55 crc kubenswrapper[4865]: E0126 17:10:55.725358 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e7fa29d-427b-4a11-88db-190d64b03f52" containerName="extract-utilities" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.725449 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e7fa29d-427b-4a11-88db-190d64b03f52" containerName="extract-utilities" Jan 26 17:10:55 crc kubenswrapper[4865]: E0126 17:10:55.725474 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e7fa29d-427b-4a11-88db-190d64b03f52" containerName="extract-content" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.725493 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e7fa29d-427b-4a11-88db-190d64b03f52" containerName="extract-content" Jan 26 17:10:55 crc kubenswrapper[4865]: E0126 17:10:55.725509 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd90d685-ea36-4bae-9f1a-eac3888bada8" containerName="extract-content" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.725522 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd90d685-ea36-4bae-9f1a-eac3888bada8" containerName="extract-content" Jan 26 17:10:55 crc kubenswrapper[4865]: E0126 17:10:55.725535 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e7fa29d-427b-4a11-88db-190d64b03f52" containerName="registry-server" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.725546 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e7fa29d-427b-4a11-88db-190d64b03f52" containerName="registry-server" Jan 26 17:10:55 crc kubenswrapper[4865]: E0126 17:10:55.725560 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd90d685-ea36-4bae-9f1a-eac3888bada8" containerName="extract-utilities" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.725569 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd90d685-ea36-4bae-9f1a-eac3888bada8" containerName="extract-utilities" Jan 26 17:10:55 crc kubenswrapper[4865]: E0126 17:10:55.725634 4865 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="bd90d685-ea36-4bae-9f1a-eac3888bada8" containerName="registry-server" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.725646 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd90d685-ea36-4bae-9f1a-eac3888bada8" containerName="registry-server" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.725782 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e7fa29d-427b-4a11-88db-190d64b03f52" containerName="registry-server" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.725807 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd90d685-ea36-4bae-9f1a-eac3888bada8" containerName="registry-server" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.728762 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-l47t7" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.731491 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.740149 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-spxc5"] Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.740187 4865 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.741187 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-spxc5" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.746397 4865 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-mfkpx" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.746402 4865 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.806726 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-spxc5"] Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.859904 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-msrjf\" (UniqueName: \"kubernetes.io/projected/f006cb83-5c07-4da0-8107-ffc503cc5995-kube-api-access-msrjf\") pod \"frr-k8s-webhook-server-7df86c4f6c-spxc5\" (UID: \"f006cb83-5c07-4da0-8107-ffc503cc5995\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-spxc5" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.860022 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/39063dfe-fb0e-4801-863c-01e696e1a391-metrics\") pod \"frr-k8s-l47t7\" (UID: \"39063dfe-fb0e-4801-863c-01e696e1a391\") " pod="metallb-system/frr-k8s-l47t7" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.860381 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/39063dfe-fb0e-4801-863c-01e696e1a391-frr-conf\") pod \"frr-k8s-l47t7\" (UID: \"39063dfe-fb0e-4801-863c-01e696e1a391\") " pod="metallb-system/frr-k8s-l47t7" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.860468 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: 
\"kubernetes.io/secret/39063dfe-fb0e-4801-863c-01e696e1a391-metrics-certs\") pod \"frr-k8s-l47t7\" (UID: \"39063dfe-fb0e-4801-863c-01e696e1a391\") " pod="metallb-system/frr-k8s-l47t7" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.860593 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w92hc\" (UniqueName: \"kubernetes.io/projected/39063dfe-fb0e-4801-863c-01e696e1a391-kube-api-access-w92hc\") pod \"frr-k8s-l47t7\" (UID: \"39063dfe-fb0e-4801-863c-01e696e1a391\") " pod="metallb-system/frr-k8s-l47t7" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.860661 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/39063dfe-fb0e-4801-863c-01e696e1a391-reloader\") pod \"frr-k8s-l47t7\" (UID: \"39063dfe-fb0e-4801-863c-01e696e1a391\") " pod="metallb-system/frr-k8s-l47t7" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.860685 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f006cb83-5c07-4da0-8107-ffc503cc5995-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-spxc5\" (UID: \"f006cb83-5c07-4da0-8107-ffc503cc5995\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-spxc5" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.860799 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/39063dfe-fb0e-4801-863c-01e696e1a391-frr-sockets\") pod \"frr-k8s-l47t7\" (UID: \"39063dfe-fb0e-4801-863c-01e696e1a391\") " pod="metallb-system/frr-k8s-l47t7" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.860833 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/39063dfe-fb0e-4801-863c-01e696e1a391-frr-startup\") pod \"frr-k8s-l47t7\" (UID: \"39063dfe-fb0e-4801-863c-01e696e1a391\") " pod="metallb-system/frr-k8s-l47t7" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.864053 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-l6hlc"] Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.865358 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-l6hlc" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.870249 4865 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-28n7h" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.870756 4865 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.870952 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.871184 4865 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.887402 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-6968d8fdc4-9vhzj"] Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.888598 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-6968d8fdc4-9vhzj" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.893870 4865 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.910371 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6968d8fdc4-9vhzj"] Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.962050 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e84d0ee2-3ac4-4418-831c-4c7c53f76868-metrics-certs\") pod \"speaker-l6hlc\" (UID: \"e84d0ee2-3ac4-4418-831c-4c7c53f76868\") " pod="metallb-system/speaker-l6hlc" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.962128 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/39063dfe-fb0e-4801-863c-01e696e1a391-frr-conf\") pod \"frr-k8s-l47t7\" (UID: \"39063dfe-fb0e-4801-863c-01e696e1a391\") " pod="metallb-system/frr-k8s-l47t7" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.962160 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/39063dfe-fb0e-4801-863c-01e696e1a391-metrics-certs\") pod \"frr-k8s-l47t7\" (UID: \"39063dfe-fb0e-4801-863c-01e696e1a391\") " pod="metallb-system/frr-k8s-l47t7" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.962335 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w92hc\" (UniqueName: \"kubernetes.io/projected/39063dfe-fb0e-4801-863c-01e696e1a391-kube-api-access-w92hc\") pod \"frr-k8s-l47t7\" (UID: \"39063dfe-fb0e-4801-863c-01e696e1a391\") " pod="metallb-system/frr-k8s-l47t7" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.962423 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/39063dfe-fb0e-4801-863c-01e696e1a391-reloader\") pod \"frr-k8s-l47t7\" (UID: \"39063dfe-fb0e-4801-863c-01e696e1a391\") " pod="metallb-system/frr-k8s-l47t7" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.962455 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f006cb83-5c07-4da0-8107-ffc503cc5995-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-spxc5\" (UID: \"f006cb83-5c07-4da0-8107-ffc503cc5995\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-spxc5" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.962549 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/39063dfe-fb0e-4801-863c-01e696e1a391-frr-sockets\") pod \"frr-k8s-l47t7\" (UID: \"39063dfe-fb0e-4801-863c-01e696e1a391\") " pod="metallb-system/frr-k8s-l47t7" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.962580 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/39063dfe-fb0e-4801-863c-01e696e1a391-frr-startup\") pod \"frr-k8s-l47t7\" (UID: \"39063dfe-fb0e-4801-863c-01e696e1a391\") " pod="metallb-system/frr-k8s-l47t7" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.962647 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: 
\"kubernetes.io/configmap/e84d0ee2-3ac4-4418-831c-4c7c53f76868-metallb-excludel2\") pod \"speaker-l6hlc\" (UID: \"e84d0ee2-3ac4-4418-831c-4c7c53f76868\") " pod="metallb-system/speaker-l6hlc" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.962677 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/39063dfe-fb0e-4801-863c-01e696e1a391-frr-conf\") pod \"frr-k8s-l47t7\" (UID: \"39063dfe-fb0e-4801-863c-01e696e1a391\") " pod="metallb-system/frr-k8s-l47t7" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.962692 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-msrjf\" (UniqueName: \"kubernetes.io/projected/f006cb83-5c07-4da0-8107-ffc503cc5995-kube-api-access-msrjf\") pod \"frr-k8s-webhook-server-7df86c4f6c-spxc5\" (UID: \"f006cb83-5c07-4da0-8107-ffc503cc5995\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-spxc5" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.962844 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/39063dfe-fb0e-4801-863c-01e696e1a391-metrics\") pod \"frr-k8s-l47t7\" (UID: \"39063dfe-fb0e-4801-863c-01e696e1a391\") " pod="metallb-system/frr-k8s-l47t7" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.962880 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ncmkz\" (UniqueName: \"kubernetes.io/projected/e84d0ee2-3ac4-4418-831c-4c7c53f76868-kube-api-access-ncmkz\") pod \"speaker-l6hlc\" (UID: \"e84d0ee2-3ac4-4418-831c-4c7c53f76868\") " pod="metallb-system/speaker-l6hlc" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.962846 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/39063dfe-fb0e-4801-863c-01e696e1a391-reloader\") pod \"frr-k8s-l47t7\" (UID: \"39063dfe-fb0e-4801-863c-01e696e1a391\") " pod="metallb-system/frr-k8s-l47t7" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.962954 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/e84d0ee2-3ac4-4418-831c-4c7c53f76868-memberlist\") pod \"speaker-l6hlc\" (UID: \"e84d0ee2-3ac4-4418-831c-4c7c53f76868\") " pod="metallb-system/speaker-l6hlc" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.963047 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/39063dfe-fb0e-4801-863c-01e696e1a391-frr-sockets\") pod \"frr-k8s-l47t7\" (UID: \"39063dfe-fb0e-4801-863c-01e696e1a391\") " pod="metallb-system/frr-k8s-l47t7" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.963291 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/39063dfe-fb0e-4801-863c-01e696e1a391-metrics\") pod \"frr-k8s-l47t7\" (UID: \"39063dfe-fb0e-4801-863c-01e696e1a391\") " pod="metallb-system/frr-k8s-l47t7" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.964213 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/39063dfe-fb0e-4801-863c-01e696e1a391-frr-startup\") pod \"frr-k8s-l47t7\" (UID: \"39063dfe-fb0e-4801-863c-01e696e1a391\") " pod="metallb-system/frr-k8s-l47t7" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.970704 4865 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/39063dfe-fb0e-4801-863c-01e696e1a391-metrics-certs\") pod \"frr-k8s-l47t7\" (UID: \"39063dfe-fb0e-4801-863c-01e696e1a391\") " pod="metallb-system/frr-k8s-l47t7" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.971243 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f006cb83-5c07-4da0-8107-ffc503cc5995-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-spxc5\" (UID: \"f006cb83-5c07-4da0-8107-ffc503cc5995\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-spxc5" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.979932 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w92hc\" (UniqueName: \"kubernetes.io/projected/39063dfe-fb0e-4801-863c-01e696e1a391-kube-api-access-w92hc\") pod \"frr-k8s-l47t7\" (UID: \"39063dfe-fb0e-4801-863c-01e696e1a391\") " pod="metallb-system/frr-k8s-l47t7" Jan 26 17:10:55 crc kubenswrapper[4865]: I0126 17:10:55.983142 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-msrjf\" (UniqueName: \"kubernetes.io/projected/f006cb83-5c07-4da0-8107-ffc503cc5995-kube-api-access-msrjf\") pod \"frr-k8s-webhook-server-7df86c4f6c-spxc5\" (UID: \"f006cb83-5c07-4da0-8107-ffc503cc5995\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-spxc5" Jan 26 17:10:56 crc kubenswrapper[4865]: I0126 17:10:56.064451 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/51718ea3-b63c-4ce5-85a1-73f5e757f6b9-metrics-certs\") pod \"controller-6968d8fdc4-9vhzj\" (UID: \"51718ea3-b63c-4ce5-85a1-73f5e757f6b9\") " pod="metallb-system/controller-6968d8fdc4-9vhzj" Jan 26 17:10:56 crc kubenswrapper[4865]: I0126 17:10:56.064509 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/e84d0ee2-3ac4-4418-831c-4c7c53f76868-metallb-excludel2\") pod \"speaker-l6hlc\" (UID: \"e84d0ee2-3ac4-4418-831c-4c7c53f76868\") " pod="metallb-system/speaker-l6hlc" Jan 26 17:10:56 crc kubenswrapper[4865]: I0126 17:10:56.064627 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ncmkz\" (UniqueName: \"kubernetes.io/projected/e84d0ee2-3ac4-4418-831c-4c7c53f76868-kube-api-access-ncmkz\") pod \"speaker-l6hlc\" (UID: \"e84d0ee2-3ac4-4418-831c-4c7c53f76868\") " pod="metallb-system/speaker-l6hlc" Jan 26 17:10:56 crc kubenswrapper[4865]: I0126 17:10:56.064708 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/51718ea3-b63c-4ce5-85a1-73f5e757f6b9-cert\") pod \"controller-6968d8fdc4-9vhzj\" (UID: \"51718ea3-b63c-4ce5-85a1-73f5e757f6b9\") " pod="metallb-system/controller-6968d8fdc4-9vhzj" Jan 26 17:10:56 crc kubenswrapper[4865]: I0126 17:10:56.064738 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/e84d0ee2-3ac4-4418-831c-4c7c53f76868-memberlist\") pod \"speaker-l6hlc\" (UID: \"e84d0ee2-3ac4-4418-831c-4c7c53f76868\") " pod="metallb-system/speaker-l6hlc" Jan 26 17:10:56 crc kubenswrapper[4865]: I0126 17:10:56.064776 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: 
\"kubernetes.io/secret/e84d0ee2-3ac4-4418-831c-4c7c53f76868-metrics-certs\") pod \"speaker-l6hlc\" (UID: \"e84d0ee2-3ac4-4418-831c-4c7c53f76868\") " pod="metallb-system/speaker-l6hlc" Jan 26 17:10:56 crc kubenswrapper[4865]: E0126 17:10:56.064954 4865 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Jan 26 17:10:56 crc kubenswrapper[4865]: E0126 17:10:56.065105 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e84d0ee2-3ac4-4418-831c-4c7c53f76868-memberlist podName:e84d0ee2-3ac4-4418-831c-4c7c53f76868 nodeName:}" failed. No retries permitted until 2026-01-26 17:10:56.565067837 +0000 UTC m=+984.148953624 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/e84d0ee2-3ac4-4418-831c-4c7c53f76868-memberlist") pod "speaker-l6hlc" (UID: "e84d0ee2-3ac4-4418-831c-4c7c53f76868") : secret "metallb-memberlist" not found Jan 26 17:10:56 crc kubenswrapper[4865]: I0126 17:10:56.065170 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvzm9\" (UniqueName: \"kubernetes.io/projected/51718ea3-b63c-4ce5-85a1-73f5e757f6b9-kube-api-access-rvzm9\") pod \"controller-6968d8fdc4-9vhzj\" (UID: \"51718ea3-b63c-4ce5-85a1-73f5e757f6b9\") " pod="metallb-system/controller-6968d8fdc4-9vhzj" Jan 26 17:10:56 crc kubenswrapper[4865]: I0126 17:10:56.065342 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-l47t7" Jan 26 17:10:56 crc kubenswrapper[4865]: I0126 17:10:56.066010 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/e84d0ee2-3ac4-4418-831c-4c7c53f76868-metallb-excludel2\") pod \"speaker-l6hlc\" (UID: \"e84d0ee2-3ac4-4418-831c-4c7c53f76868\") " pod="metallb-system/speaker-l6hlc" Jan 26 17:10:56 crc kubenswrapper[4865]: I0126 17:10:56.074806 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-spxc5" Jan 26 17:10:56 crc kubenswrapper[4865]: I0126 17:10:56.077476 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e84d0ee2-3ac4-4418-831c-4c7c53f76868-metrics-certs\") pod \"speaker-l6hlc\" (UID: \"e84d0ee2-3ac4-4418-831c-4c7c53f76868\") " pod="metallb-system/speaker-l6hlc" Jan 26 17:10:56 crc kubenswrapper[4865]: I0126 17:10:56.142876 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ncmkz\" (UniqueName: \"kubernetes.io/projected/e84d0ee2-3ac4-4418-831c-4c7c53f76868-kube-api-access-ncmkz\") pod \"speaker-l6hlc\" (UID: \"e84d0ee2-3ac4-4418-831c-4c7c53f76868\") " pod="metallb-system/speaker-l6hlc" Jan 26 17:10:56 crc kubenswrapper[4865]: I0126 17:10:56.166817 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvzm9\" (UniqueName: \"kubernetes.io/projected/51718ea3-b63c-4ce5-85a1-73f5e757f6b9-kube-api-access-rvzm9\") pod \"controller-6968d8fdc4-9vhzj\" (UID: \"51718ea3-b63c-4ce5-85a1-73f5e757f6b9\") " pod="metallb-system/controller-6968d8fdc4-9vhzj" Jan 26 17:10:56 crc kubenswrapper[4865]: I0126 17:10:56.166882 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/51718ea3-b63c-4ce5-85a1-73f5e757f6b9-metrics-certs\") pod \"controller-6968d8fdc4-9vhzj\" (UID: \"51718ea3-b63c-4ce5-85a1-73f5e757f6b9\") " pod="metallb-system/controller-6968d8fdc4-9vhzj" Jan 26 17:10:56 crc kubenswrapper[4865]: I0126 17:10:56.166934 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/51718ea3-b63c-4ce5-85a1-73f5e757f6b9-cert\") pod \"controller-6968d8fdc4-9vhzj\" (UID: \"51718ea3-b63c-4ce5-85a1-73f5e757f6b9\") " pod="metallb-system/controller-6968d8fdc4-9vhzj" Jan 26 17:10:56 crc kubenswrapper[4865]: I0126 17:10:56.170970 4865 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Jan 26 17:10:56 crc kubenswrapper[4865]: I0126 17:10:56.174333 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/51718ea3-b63c-4ce5-85a1-73f5e757f6b9-metrics-certs\") pod \"controller-6968d8fdc4-9vhzj\" (UID: \"51718ea3-b63c-4ce5-85a1-73f5e757f6b9\") " pod="metallb-system/controller-6968d8fdc4-9vhzj" Jan 26 17:10:56 crc kubenswrapper[4865]: I0126 17:10:56.185371 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/51718ea3-b63c-4ce5-85a1-73f5e757f6b9-cert\") pod \"controller-6968d8fdc4-9vhzj\" (UID: \"51718ea3-b63c-4ce5-85a1-73f5e757f6b9\") " pod="metallb-system/controller-6968d8fdc4-9vhzj" Jan 26 17:10:56 crc kubenswrapper[4865]: I0126 17:10:56.210565 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvzm9\" (UniqueName: \"kubernetes.io/projected/51718ea3-b63c-4ce5-85a1-73f5e757f6b9-kube-api-access-rvzm9\") pod \"controller-6968d8fdc4-9vhzj\" (UID: \"51718ea3-b63c-4ce5-85a1-73f5e757f6b9\") " pod="metallb-system/controller-6968d8fdc4-9vhzj" Jan 26 17:10:56 crc kubenswrapper[4865]: I0126 17:10:56.505725 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-6968d8fdc4-9vhzj" Jan 26 17:10:56 crc kubenswrapper[4865]: I0126 17:10:56.575004 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/e84d0ee2-3ac4-4418-831c-4c7c53f76868-memberlist\") pod \"speaker-l6hlc\" (UID: \"e84d0ee2-3ac4-4418-831c-4c7c53f76868\") " pod="metallb-system/speaker-l6hlc" Jan 26 17:10:56 crc kubenswrapper[4865]: E0126 17:10:56.575231 4865 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Jan 26 17:10:56 crc kubenswrapper[4865]: E0126 17:10:56.575299 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e84d0ee2-3ac4-4418-831c-4c7c53f76868-memberlist podName:e84d0ee2-3ac4-4418-831c-4c7c53f76868 nodeName:}" failed. No retries permitted until 2026-01-26 17:10:57.575278184 +0000 UTC m=+985.159163781 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/e84d0ee2-3ac4-4418-831c-4c7c53f76868-memberlist") pod "speaker-l6hlc" (UID: "e84d0ee2-3ac4-4418-831c-4c7c53f76868") : secret "metallb-memberlist" not found Jan 26 17:10:56 crc kubenswrapper[4865]: I0126 17:10:56.584606 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-l47t7" event={"ID":"39063dfe-fb0e-4801-863c-01e696e1a391","Type":"ContainerStarted","Data":"b65d1d119c127442c5d7a6ec60b8c064dccaaa77b8d3c9ee7b8dd04d256b5235"} Jan 26 17:10:56 crc kubenswrapper[4865]: I0126 17:10:56.675047 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-spxc5"] Jan 26 17:10:56 crc kubenswrapper[4865]: W0126 17:10:56.682378 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf006cb83_5c07_4da0_8107_ffc503cc5995.slice/crio-56a79897aa30b730b0122148408f8fbd2be78f1e7f729974c975c0a8d1125126 WatchSource:0}: Error finding container 56a79897aa30b730b0122148408f8fbd2be78f1e7f729974c975c0a8d1125126: Status 404 returned error can't find the container with id 56a79897aa30b730b0122148408f8fbd2be78f1e7f729974c975c0a8d1125126 Jan 26 17:10:57 crc kubenswrapper[4865]: I0126 17:10:57.020797 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6968d8fdc4-9vhzj"] Jan 26 17:10:57 crc kubenswrapper[4865]: W0126 17:10:57.030288 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod51718ea3_b63c_4ce5_85a1_73f5e757f6b9.slice/crio-f43c705e478173beb140ca8a50866e41a9845bdd12672d7967e9723d230c9126 WatchSource:0}: Error finding container f43c705e478173beb140ca8a50866e41a9845bdd12672d7967e9723d230c9126: Status 404 returned error can't find the container with id f43c705e478173beb140ca8a50866e41a9845bdd12672d7967e9723d230c9126 Jan 26 17:10:57 crc kubenswrapper[4865]: I0126 17:10:57.592814 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-spxc5" event={"ID":"f006cb83-5c07-4da0-8107-ffc503cc5995","Type":"ContainerStarted","Data":"56a79897aa30b730b0122148408f8fbd2be78f1e7f729974c975c0a8d1125126"} Jan 26 17:10:57 crc kubenswrapper[4865]: I0126 17:10:57.596768 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-9vhzj" 
event={"ID":"51718ea3-b63c-4ce5-85a1-73f5e757f6b9","Type":"ContainerStarted","Data":"8290658a8c35f6d32aec08c5e8f850d0d11c4208de3bede49584c18f83e0731c"} Jan 26 17:10:57 crc kubenswrapper[4865]: I0126 17:10:57.596820 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-9vhzj" event={"ID":"51718ea3-b63c-4ce5-85a1-73f5e757f6b9","Type":"ContainerStarted","Data":"83b542dab368b5a0cb6fbb558b535557b7503e9a3b3805844f19e1bff52b25df"} Jan 26 17:10:57 crc kubenswrapper[4865]: I0126 17:10:57.596830 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-9vhzj" event={"ID":"51718ea3-b63c-4ce5-85a1-73f5e757f6b9","Type":"ContainerStarted","Data":"f43c705e478173beb140ca8a50866e41a9845bdd12672d7967e9723d230c9126"} Jan 26 17:10:57 crc kubenswrapper[4865]: I0126 17:10:57.597833 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-6968d8fdc4-9vhzj" Jan 26 17:10:57 crc kubenswrapper[4865]: I0126 17:10:57.598469 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/e84d0ee2-3ac4-4418-831c-4c7c53f76868-memberlist\") pod \"speaker-l6hlc\" (UID: \"e84d0ee2-3ac4-4418-831c-4c7c53f76868\") " pod="metallb-system/speaker-l6hlc" Jan 26 17:10:57 crc kubenswrapper[4865]: I0126 17:10:57.613124 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/e84d0ee2-3ac4-4418-831c-4c7c53f76868-memberlist\") pod \"speaker-l6hlc\" (UID: \"e84d0ee2-3ac4-4418-831c-4c7c53f76868\") " pod="metallb-system/speaker-l6hlc" Jan 26 17:10:57 crc kubenswrapper[4865]: I0126 17:10:57.629766 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-6968d8fdc4-9vhzj" podStartSLOduration=2.629709455 podStartE2EDuration="2.629709455s" podCreationTimestamp="2026-01-26 17:10:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 17:10:57.622270264 +0000 UTC m=+985.206155861" watchObservedRunningTime="2026-01-26 17:10:57.629709455 +0000 UTC m=+985.213595042" Jan 26 17:10:57 crc kubenswrapper[4865]: I0126 17:10:57.688581 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-l6hlc" Jan 26 17:10:57 crc kubenswrapper[4865]: W0126 17:10:57.739870 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode84d0ee2_3ac4_4418_831c_4c7c53f76868.slice/crio-35a88b8f2757100364d4c65f7454ece90c6f3b24a802980f1a0d1327c33a6d84 WatchSource:0}: Error finding container 35a88b8f2757100364d4c65f7454ece90c6f3b24a802980f1a0d1327c33a6d84: Status 404 returned error can't find the container with id 35a88b8f2757100364d4c65f7454ece90c6f3b24a802980f1a0d1327c33a6d84 Jan 26 17:10:58 crc kubenswrapper[4865]: I0126 17:10:58.634602 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-l6hlc" event={"ID":"e84d0ee2-3ac4-4418-831c-4c7c53f76868","Type":"ContainerStarted","Data":"bbb5c4fb7bd3aa6baaa80e6804dcb94ed4487da695c91c9b65f229f793cd16c7"} Jan 26 17:10:58 crc kubenswrapper[4865]: I0126 17:10:58.636154 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-l6hlc" event={"ID":"e84d0ee2-3ac4-4418-831c-4c7c53f76868","Type":"ContainerStarted","Data":"35a88b8f2757100364d4c65f7454ece90c6f3b24a802980f1a0d1327c33a6d84"} Jan 26 17:10:59 crc kubenswrapper[4865]: I0126 17:10:59.653291 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-l6hlc" event={"ID":"e84d0ee2-3ac4-4418-831c-4c7c53f76868","Type":"ContainerStarted","Data":"3fbdde65b521e3a4565d7a5da6569f01c82d928b952a05aedaba068fc99de6c6"} Jan 26 17:10:59 crc kubenswrapper[4865]: I0126 17:10:59.654427 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-l6hlc" Jan 26 17:10:59 crc kubenswrapper[4865]: I0126 17:10:59.686395 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-l6hlc" podStartSLOduration=4.686370806 podStartE2EDuration="4.686370806s" podCreationTimestamp="2026-01-26 17:10:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 17:10:59.683218997 +0000 UTC m=+987.267104594" watchObservedRunningTime="2026-01-26 17:10:59.686370806 +0000 UTC m=+987.270256393" Jan 26 17:11:04 crc kubenswrapper[4865]: I0126 17:11:04.512213 4865 patch_prober.go:28] interesting pod/machine-config-daemon-q8cb9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 17:11:04 crc kubenswrapper[4865]: I0126 17:11:04.512516 4865 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 17:11:06 crc kubenswrapper[4865]: I0126 17:11:06.730338 4865 generic.go:334] "Generic (PLEG): container finished" podID="39063dfe-fb0e-4801-863c-01e696e1a391" containerID="38a189ddbc82bd47f3dabcf4d2e7dc89a09d93158bcf346b778ec795d3fb74e8" exitCode=0 Jan 26 17:11:06 crc kubenswrapper[4865]: I0126 17:11:06.730385 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-l47t7" event={"ID":"39063dfe-fb0e-4801-863c-01e696e1a391","Type":"ContainerDied","Data":"38a189ddbc82bd47f3dabcf4d2e7dc89a09d93158bcf346b778ec795d3fb74e8"} Jan 26 
17:11:06 crc kubenswrapper[4865]: I0126 17:11:06.732570 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-spxc5" event={"ID":"f006cb83-5c07-4da0-8107-ffc503cc5995","Type":"ContainerStarted","Data":"ba5191ee2e5f7d40eeeadd319b35f64e1e4af18f74d82e3a3ed7d87a0768a7ef"} Jan 26 17:11:06 crc kubenswrapper[4865]: I0126 17:11:06.733016 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-spxc5" Jan 26 17:11:06 crc kubenswrapper[4865]: I0126 17:11:06.776688 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-spxc5" podStartSLOduration=2.169201252 podStartE2EDuration="11.776665167s" podCreationTimestamp="2026-01-26 17:10:55 +0000 UTC" firstStartedPulling="2026-01-26 17:10:56.689656115 +0000 UTC m=+984.273541702" lastFinishedPulling="2026-01-26 17:11:06.29712003 +0000 UTC m=+993.881005617" observedRunningTime="2026-01-26 17:11:06.775251126 +0000 UTC m=+994.359136713" watchObservedRunningTime="2026-01-26 17:11:06.776665167 +0000 UTC m=+994.360550754" Jan 26 17:11:07 crc kubenswrapper[4865]: I0126 17:11:07.740617 4865 generic.go:334] "Generic (PLEG): container finished" podID="39063dfe-fb0e-4801-863c-01e696e1a391" containerID="ca014f5be624d4cb1da56333fed675436a8000da877f8ea6bc7e8a3466831d8e" exitCode=0 Jan 26 17:11:07 crc kubenswrapper[4865]: I0126 17:11:07.740700 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-l47t7" event={"ID":"39063dfe-fb0e-4801-863c-01e696e1a391","Type":"ContainerDied","Data":"ca014f5be624d4cb1da56333fed675436a8000da877f8ea6bc7e8a3466831d8e"} Jan 26 17:11:08 crc kubenswrapper[4865]: I0126 17:11:08.749943 4865 generic.go:334] "Generic (PLEG): container finished" podID="39063dfe-fb0e-4801-863c-01e696e1a391" containerID="c2db48eb4ea936ba2b360a785855f17061b04ca97ab1395ff48ccb6add10f4f5" exitCode=0 Jan 26 17:11:08 crc kubenswrapper[4865]: I0126 17:11:08.750039 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-l47t7" event={"ID":"39063dfe-fb0e-4801-863c-01e696e1a391","Type":"ContainerDied","Data":"c2db48eb4ea936ba2b360a785855f17061b04ca97ab1395ff48ccb6add10f4f5"} Jan 26 17:11:09 crc kubenswrapper[4865]: I0126 17:11:09.761487 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-l47t7" event={"ID":"39063dfe-fb0e-4801-863c-01e696e1a391","Type":"ContainerStarted","Data":"c46882ef1ee07d7af01675d3c6712eaf25926c5c4fdaadccb70d4fb7c671ebee"} Jan 26 17:11:09 crc kubenswrapper[4865]: I0126 17:11:09.761818 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-l47t7" event={"ID":"39063dfe-fb0e-4801-863c-01e696e1a391","Type":"ContainerStarted","Data":"6abe3d7bb8c5be40cc4885c53099e168cb7bf1531aed62e6d85b90565433bf2f"} Jan 26 17:11:09 crc kubenswrapper[4865]: I0126 17:11:09.761832 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-l47t7" event={"ID":"39063dfe-fb0e-4801-863c-01e696e1a391","Type":"ContainerStarted","Data":"05047665d241c47cf3d1e350541f1d147226f0e8c181e4c1aeca7683574a2437"} Jan 26 17:11:09 crc kubenswrapper[4865]: I0126 17:11:09.761841 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-l47t7" event={"ID":"39063dfe-fb0e-4801-863c-01e696e1a391","Type":"ContainerStarted","Data":"60a42e88b0d90401c24abc369e281d9423d936feca53f203593d7a9a92e07064"} Jan 26 17:11:10 crc kubenswrapper[4865]: I0126 17:11:10.773177 
4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-l47t7" event={"ID":"39063dfe-fb0e-4801-863c-01e696e1a391","Type":"ContainerStarted","Data":"b5255510dcf2d2d02fccacca389a04071e58094829983151533cd7d537bdf70e"} Jan 26 17:11:10 crc kubenswrapper[4865]: I0126 17:11:10.773227 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-l47t7" event={"ID":"39063dfe-fb0e-4801-863c-01e696e1a391","Type":"ContainerStarted","Data":"3bfb5388e859687d92e655385923d39b0285868a0afcef44339fb378f42bae31"} Jan 26 17:11:10 crc kubenswrapper[4865]: I0126 17:11:10.773510 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-l47t7" Jan 26 17:11:10 crc kubenswrapper[4865]: I0126 17:11:10.801167 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-l47t7" podStartSLOduration=5.8911032290000005 podStartE2EDuration="15.801142252s" podCreationTimestamp="2026-01-26 17:10:55 +0000 UTC" firstStartedPulling="2026-01-26 17:10:56.353889904 +0000 UTC m=+983.937775491" lastFinishedPulling="2026-01-26 17:11:06.263928927 +0000 UTC m=+993.847814514" observedRunningTime="2026-01-26 17:11:10.797398626 +0000 UTC m=+998.381284213" watchObservedRunningTime="2026-01-26 17:11:10.801142252 +0000 UTC m=+998.385027839" Jan 26 17:11:11 crc kubenswrapper[4865]: I0126 17:11:11.066321 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-l47t7" Jan 26 17:11:11 crc kubenswrapper[4865]: I0126 17:11:11.132731 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-l47t7" Jan 26 17:11:16 crc kubenswrapper[4865]: I0126 17:11:16.085181 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-spxc5" Jan 26 17:11:16 crc kubenswrapper[4865]: I0126 17:11:16.511156 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-6968d8fdc4-9vhzj" Jan 26 17:11:17 crc kubenswrapper[4865]: I0126 17:11:17.694219 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-l6hlc" Jan 26 17:11:19 crc kubenswrapper[4865]: I0126 17:11:19.231850 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a4zdmq"] Jan 26 17:11:19 crc kubenswrapper[4865]: I0126 17:11:19.233479 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a4zdmq" Jan 26 17:11:19 crc kubenswrapper[4865]: I0126 17:11:19.236072 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 26 17:11:19 crc kubenswrapper[4865]: I0126 17:11:19.249752 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a4zdmq"] Jan 26 17:11:19 crc kubenswrapper[4865]: I0126 17:11:19.424426 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/cb6af1b5-d77d-4e65-ba51-65c35fe73bd3-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a4zdmq\" (UID: \"cb6af1b5-d77d-4e65-ba51-65c35fe73bd3\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a4zdmq" Jan 26 17:11:19 crc kubenswrapper[4865]: I0126 17:11:19.424506 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/cb6af1b5-d77d-4e65-ba51-65c35fe73bd3-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a4zdmq\" (UID: \"cb6af1b5-d77d-4e65-ba51-65c35fe73bd3\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a4zdmq" Jan 26 17:11:19 crc kubenswrapper[4865]: I0126 17:11:19.424724 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xw9bk\" (UniqueName: \"kubernetes.io/projected/cb6af1b5-d77d-4e65-ba51-65c35fe73bd3-kube-api-access-xw9bk\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a4zdmq\" (UID: \"cb6af1b5-d77d-4e65-ba51-65c35fe73bd3\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a4zdmq" Jan 26 17:11:19 crc kubenswrapper[4865]: I0126 17:11:19.526068 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/cb6af1b5-d77d-4e65-ba51-65c35fe73bd3-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a4zdmq\" (UID: \"cb6af1b5-d77d-4e65-ba51-65c35fe73bd3\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a4zdmq" Jan 26 17:11:19 crc kubenswrapper[4865]: I0126 17:11:19.526158 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/cb6af1b5-d77d-4e65-ba51-65c35fe73bd3-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a4zdmq\" (UID: \"cb6af1b5-d77d-4e65-ba51-65c35fe73bd3\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a4zdmq" Jan 26 17:11:19 crc kubenswrapper[4865]: I0126 17:11:19.526204 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xw9bk\" (UniqueName: \"kubernetes.io/projected/cb6af1b5-d77d-4e65-ba51-65c35fe73bd3-kube-api-access-xw9bk\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a4zdmq\" (UID: \"cb6af1b5-d77d-4e65-ba51-65c35fe73bd3\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a4zdmq" Jan 26 17:11:19 crc kubenswrapper[4865]: I0126 17:11:19.527194 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/cb6af1b5-d77d-4e65-ba51-65c35fe73bd3-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a4zdmq\" (UID: \"cb6af1b5-d77d-4e65-ba51-65c35fe73bd3\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a4zdmq" Jan 26 17:11:19 crc kubenswrapper[4865]: I0126 17:11:19.527463 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/cb6af1b5-d77d-4e65-ba51-65c35fe73bd3-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a4zdmq\" (UID: \"cb6af1b5-d77d-4e65-ba51-65c35fe73bd3\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a4zdmq" Jan 26 17:11:19 crc kubenswrapper[4865]: I0126 17:11:19.545638 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xw9bk\" (UniqueName: \"kubernetes.io/projected/cb6af1b5-d77d-4e65-ba51-65c35fe73bd3-kube-api-access-xw9bk\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a4zdmq\" (UID: \"cb6af1b5-d77d-4e65-ba51-65c35fe73bd3\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a4zdmq" Jan 26 17:11:19 crc kubenswrapper[4865]: I0126 17:11:19.559592 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a4zdmq" Jan 26 17:11:20 crc kubenswrapper[4865]: I0126 17:11:20.115451 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a4zdmq"] Jan 26 17:11:20 crc kubenswrapper[4865]: W0126 17:11:20.128254 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcb6af1b5_d77d_4e65_ba51_65c35fe73bd3.slice/crio-cbe4901272b6983ad5e04abb9513642f557504f7ab82805d8f8473c3e7c39433 WatchSource:0}: Error finding container cbe4901272b6983ad5e04abb9513642f557504f7ab82805d8f8473c3e7c39433: Status 404 returned error can't find the container with id cbe4901272b6983ad5e04abb9513642f557504f7ab82805d8f8473c3e7c39433 Jan 26 17:11:20 crc kubenswrapper[4865]: I0126 17:11:20.841858 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a4zdmq" event={"ID":"cb6af1b5-d77d-4e65-ba51-65c35fe73bd3","Type":"ContainerStarted","Data":"cbe4901272b6983ad5e04abb9513642f557504f7ab82805d8f8473c3e7c39433"} Jan 26 17:11:21 crc kubenswrapper[4865]: I0126 17:11:21.849490 4865 generic.go:334] "Generic (PLEG): container finished" podID="cb6af1b5-d77d-4e65-ba51-65c35fe73bd3" containerID="00151c46a1a134edff956a1cc0f8bb522d9316e6d53b299835c37167329467dc" exitCode=0 Jan 26 17:11:21 crc kubenswrapper[4865]: I0126 17:11:21.849534 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a4zdmq" event={"ID":"cb6af1b5-d77d-4e65-ba51-65c35fe73bd3","Type":"ContainerDied","Data":"00151c46a1a134edff956a1cc0f8bb522d9316e6d53b299835c37167329467dc"} Jan 26 17:11:25 crc kubenswrapper[4865]: I0126 17:11:25.874532 4865 generic.go:334] "Generic (PLEG): container finished" podID="cb6af1b5-d77d-4e65-ba51-65c35fe73bd3" containerID="51b4cdc68554e71b7c2ebd5feef5b773c32ced1ed6037ee5ba6a7108c5132cff" exitCode=0 Jan 26 17:11:25 crc kubenswrapper[4865]: I0126 17:11:25.874600 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a4zdmq" event={"ID":"cb6af1b5-d77d-4e65-ba51-65c35fe73bd3","Type":"ContainerDied","Data":"51b4cdc68554e71b7c2ebd5feef5b773c32ced1ed6037ee5ba6a7108c5132cff"} Jan 26 17:11:26 crc kubenswrapper[4865]: I0126 17:11:26.068553 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-l47t7" Jan 26 17:11:26 crc kubenswrapper[4865]: I0126 17:11:26.883727 4865 generic.go:334] "Generic (PLEG): container finished" podID="cb6af1b5-d77d-4e65-ba51-65c35fe73bd3" containerID="373b6de008a5d64b8e6bc0ebce056bd8fe2e9d895d7c89cb372ae7b38e4aefa4" exitCode=0 Jan 26 17:11:26 crc kubenswrapper[4865]: I0126 17:11:26.883768 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a4zdmq" event={"ID":"cb6af1b5-d77d-4e65-ba51-65c35fe73bd3","Type":"ContainerDied","Data":"373b6de008a5d64b8e6bc0ebce056bd8fe2e9d895d7c89cb372ae7b38e4aefa4"} Jan 26 17:11:28 crc kubenswrapper[4865]: I0126 17:11:28.180192 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a4zdmq" Jan 26 17:11:28 crc kubenswrapper[4865]: I0126 17:11:28.309690 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/cb6af1b5-d77d-4e65-ba51-65c35fe73bd3-bundle\") pod \"cb6af1b5-d77d-4e65-ba51-65c35fe73bd3\" (UID: \"cb6af1b5-d77d-4e65-ba51-65c35fe73bd3\") " Jan 26 17:11:28 crc kubenswrapper[4865]: I0126 17:11:28.309778 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xw9bk\" (UniqueName: \"kubernetes.io/projected/cb6af1b5-d77d-4e65-ba51-65c35fe73bd3-kube-api-access-xw9bk\") pod \"cb6af1b5-d77d-4e65-ba51-65c35fe73bd3\" (UID: \"cb6af1b5-d77d-4e65-ba51-65c35fe73bd3\") " Jan 26 17:11:28 crc kubenswrapper[4865]: I0126 17:11:28.309905 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/cb6af1b5-d77d-4e65-ba51-65c35fe73bd3-util\") pod \"cb6af1b5-d77d-4e65-ba51-65c35fe73bd3\" (UID: \"cb6af1b5-d77d-4e65-ba51-65c35fe73bd3\") " Jan 26 17:11:28 crc kubenswrapper[4865]: I0126 17:11:28.311200 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb6af1b5-d77d-4e65-ba51-65c35fe73bd3-bundle" (OuterVolumeSpecName: "bundle") pod "cb6af1b5-d77d-4e65-ba51-65c35fe73bd3" (UID: "cb6af1b5-d77d-4e65-ba51-65c35fe73bd3"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 17:11:28 crc kubenswrapper[4865]: I0126 17:11:28.316825 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb6af1b5-d77d-4e65-ba51-65c35fe73bd3-kube-api-access-xw9bk" (OuterVolumeSpecName: "kube-api-access-xw9bk") pod "cb6af1b5-d77d-4e65-ba51-65c35fe73bd3" (UID: "cb6af1b5-d77d-4e65-ba51-65c35fe73bd3"). InnerVolumeSpecName "kube-api-access-xw9bk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 17:11:28 crc kubenswrapper[4865]: I0126 17:11:28.325985 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb6af1b5-d77d-4e65-ba51-65c35fe73bd3-util" (OuterVolumeSpecName: "util") pod "cb6af1b5-d77d-4e65-ba51-65c35fe73bd3" (UID: "cb6af1b5-d77d-4e65-ba51-65c35fe73bd3"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 17:11:28 crc kubenswrapper[4865]: I0126 17:11:28.412453 4865 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/cb6af1b5-d77d-4e65-ba51-65c35fe73bd3-util\") on node \"crc\" DevicePath \"\"" Jan 26 17:11:28 crc kubenswrapper[4865]: I0126 17:11:28.412519 4865 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/cb6af1b5-d77d-4e65-ba51-65c35fe73bd3-bundle\") on node \"crc\" DevicePath \"\"" Jan 26 17:11:28 crc kubenswrapper[4865]: I0126 17:11:28.412536 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xw9bk\" (UniqueName: \"kubernetes.io/projected/cb6af1b5-d77d-4e65-ba51-65c35fe73bd3-kube-api-access-xw9bk\") on node \"crc\" DevicePath \"\"" Jan 26 17:11:28 crc kubenswrapper[4865]: I0126 17:11:28.898894 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a4zdmq" event={"ID":"cb6af1b5-d77d-4e65-ba51-65c35fe73bd3","Type":"ContainerDied","Data":"cbe4901272b6983ad5e04abb9513642f557504f7ab82805d8f8473c3e7c39433"} Jan 26 17:11:28 crc kubenswrapper[4865]: I0126 17:11:28.899211 4865 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cbe4901272b6983ad5e04abb9513642f557504f7ab82805d8f8473c3e7c39433" Jan 26 17:11:28 crc kubenswrapper[4865]: I0126 17:11:28.899017 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931a4zdmq" Jan 26 17:11:31 crc kubenswrapper[4865]: I0126 17:11:31.719221 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-ccfdl"] Jan 26 17:11:31 crc kubenswrapper[4865]: E0126 17:11:31.719934 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb6af1b5-d77d-4e65-ba51-65c35fe73bd3" containerName="util" Jan 26 17:11:31 crc kubenswrapper[4865]: I0126 17:11:31.719955 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb6af1b5-d77d-4e65-ba51-65c35fe73bd3" containerName="util" Jan 26 17:11:31 crc kubenswrapper[4865]: E0126 17:11:31.719973 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb6af1b5-d77d-4e65-ba51-65c35fe73bd3" containerName="pull" Jan 26 17:11:31 crc kubenswrapper[4865]: I0126 17:11:31.719980 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb6af1b5-d77d-4e65-ba51-65c35fe73bd3" containerName="pull" Jan 26 17:11:31 crc kubenswrapper[4865]: E0126 17:11:31.720015 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb6af1b5-d77d-4e65-ba51-65c35fe73bd3" containerName="extract" Jan 26 17:11:31 crc kubenswrapper[4865]: I0126 17:11:31.720024 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb6af1b5-d77d-4e65-ba51-65c35fe73bd3" containerName="extract" Jan 26 17:11:31 crc kubenswrapper[4865]: I0126 17:11:31.720169 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb6af1b5-d77d-4e65-ba51-65c35fe73bd3" containerName="extract" Jan 26 17:11:31 crc kubenswrapper[4865]: I0126 17:11:31.720631 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-ccfdl" Jan 26 17:11:31 crc kubenswrapper[4865]: I0126 17:11:31.722418 4865 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager-operator"/"cert-manager-operator-controller-manager-dockercfg-n9bx8" Jan 26 17:11:31 crc kubenswrapper[4865]: I0126 17:11:31.722525 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"kube-root-ca.crt" Jan 26 17:11:31 crc kubenswrapper[4865]: I0126 17:11:31.723040 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"openshift-service-ca.crt" Jan 26 17:11:31 crc kubenswrapper[4865]: I0126 17:11:31.726159 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hcbq5\" (UniqueName: \"kubernetes.io/projected/a639bef9-8db9-4c5e-88d4-d3396591dfef-kube-api-access-hcbq5\") pod \"cert-manager-operator-controller-manager-64cf6dff88-ccfdl\" (UID: \"a639bef9-8db9-4c5e-88d4-d3396591dfef\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-ccfdl" Jan 26 17:11:31 crc kubenswrapper[4865]: I0126 17:11:31.726238 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/a639bef9-8db9-4c5e-88d4-d3396591dfef-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-ccfdl\" (UID: \"a639bef9-8db9-4c5e-88d4-d3396591dfef\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-ccfdl" Jan 26 17:11:31 crc kubenswrapper[4865]: I0126 17:11:31.773047 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-ccfdl"] Jan 26 17:11:31 crc kubenswrapper[4865]: I0126 17:11:31.827453 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hcbq5\" (UniqueName: \"kubernetes.io/projected/a639bef9-8db9-4c5e-88d4-d3396591dfef-kube-api-access-hcbq5\") pod \"cert-manager-operator-controller-manager-64cf6dff88-ccfdl\" (UID: \"a639bef9-8db9-4c5e-88d4-d3396591dfef\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-ccfdl" Jan 26 17:11:31 crc kubenswrapper[4865]: I0126 17:11:31.827741 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/a639bef9-8db9-4c5e-88d4-d3396591dfef-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-ccfdl\" (UID: \"a639bef9-8db9-4c5e-88d4-d3396591dfef\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-ccfdl" Jan 26 17:11:31 crc kubenswrapper[4865]: I0126 17:11:31.828289 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/a639bef9-8db9-4c5e-88d4-d3396591dfef-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-ccfdl\" (UID: \"a639bef9-8db9-4c5e-88d4-d3396591dfef\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-ccfdl" Jan 26 17:11:31 crc kubenswrapper[4865]: I0126 17:11:31.851110 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hcbq5\" (UniqueName: \"kubernetes.io/projected/a639bef9-8db9-4c5e-88d4-d3396591dfef-kube-api-access-hcbq5\") pod \"cert-manager-operator-controller-manager-64cf6dff88-ccfdl\" (UID: \"a639bef9-8db9-4c5e-88d4-d3396591dfef\") " 
pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-ccfdl" Jan 26 17:11:32 crc kubenswrapper[4865]: I0126 17:11:32.040373 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-ccfdl" Jan 26 17:11:32 crc kubenswrapper[4865]: I0126 17:11:32.549315 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-ccfdl"] Jan 26 17:11:32 crc kubenswrapper[4865]: I0126 17:11:32.938151 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-ccfdl" event={"ID":"a639bef9-8db9-4c5e-88d4-d3396591dfef","Type":"ContainerStarted","Data":"09f260b1bf7e365534946594abe0d97a027790a9d2abdd0f00a51a0d1d54b419"} Jan 26 17:11:34 crc kubenswrapper[4865]: I0126 17:11:34.512604 4865 patch_prober.go:28] interesting pod/machine-config-daemon-q8cb9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 17:11:34 crc kubenswrapper[4865]: I0126 17:11:34.512683 4865 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 17:11:34 crc kubenswrapper[4865]: I0126 17:11:34.512737 4865 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" Jan 26 17:11:34 crc kubenswrapper[4865]: I0126 17:11:34.513507 4865 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"75da2701546c4de83d6fc2fed2ae87c37fb18a0f0c6813145dc77a86aa6e728c"} pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 26 17:11:34 crc kubenswrapper[4865]: I0126 17:11:34.513579 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" containerName="machine-config-daemon" containerID="cri-o://75da2701546c4de83d6fc2fed2ae87c37fb18a0f0c6813145dc77a86aa6e728c" gracePeriod=600 Jan 26 17:11:34 crc kubenswrapper[4865]: I0126 17:11:34.957903 4865 generic.go:334] "Generic (PLEG): container finished" podID="0ddedab5-2528-4881-9251-9ba5334aea61" containerID="75da2701546c4de83d6fc2fed2ae87c37fb18a0f0c6813145dc77a86aa6e728c" exitCode=0 Jan 26 17:11:34 crc kubenswrapper[4865]: I0126 17:11:34.957970 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" event={"ID":"0ddedab5-2528-4881-9251-9ba5334aea61","Type":"ContainerDied","Data":"75da2701546c4de83d6fc2fed2ae87c37fb18a0f0c6813145dc77a86aa6e728c"} Jan 26 17:11:34 crc kubenswrapper[4865]: I0126 17:11:34.958538 4865 scope.go:117] "RemoveContainer" containerID="31e35f2ae088b8909c6af16afb0dd4198f471cee550ac52a7df7e61577a7b722" Jan 26 17:11:35 crc kubenswrapper[4865]: I0126 17:11:35.967578 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" event={"ID":"0ddedab5-2528-4881-9251-9ba5334aea61","Type":"ContainerStarted","Data":"02835d7cd4d6dbfb1002abe48dff7a6d86863fd969778fa1b275c5170d84e16e"} Jan 26 17:11:43 crc kubenswrapper[4865]: I0126 17:11:43.026477 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-ccfdl" event={"ID":"a639bef9-8db9-4c5e-88d4-d3396591dfef","Type":"ContainerStarted","Data":"fd05f57567d7b06d09de0a3a63b5ce6878d4ca8601a508db7179b47354784f86"} Jan 26 17:11:43 crc kubenswrapper[4865]: I0126 17:11:43.051572 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-ccfdl" podStartSLOduration=2.49121123 podStartE2EDuration="12.051549307s" podCreationTimestamp="2026-01-26 17:11:31 +0000 UTC" firstStartedPulling="2026-01-26 17:11:32.56355054 +0000 UTC m=+1020.147436137" lastFinishedPulling="2026-01-26 17:11:42.123888627 +0000 UTC m=+1029.707774214" observedRunningTime="2026-01-26 17:11:43.046823152 +0000 UTC m=+1030.630708769" watchObservedRunningTime="2026-01-26 17:11:43.051549307 +0000 UTC m=+1030.635434904" Jan 26 17:11:49 crc kubenswrapper[4865]: I0126 17:11:49.690905 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-wbdpd"] Jan 26 17:11:49 crc kubenswrapper[4865]: I0126 17:11:49.692534 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-wbdpd" Jan 26 17:11:49 crc kubenswrapper[4865]: I0126 17:11:49.694382 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Jan 26 17:11:49 crc kubenswrapper[4865]: I0126 17:11:49.694475 4865 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-bvdm7" Jan 26 17:11:49 crc kubenswrapper[4865]: I0126 17:11:49.694771 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Jan 26 17:11:49 crc kubenswrapper[4865]: I0126 17:11:49.699340 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-wbdpd"] Jan 26 17:11:49 crc kubenswrapper[4865]: I0126 17:11:49.830519 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5lj65\" (UniqueName: \"kubernetes.io/projected/37ecc18b-2f88-46f4-b925-bf5c7be97cde-kube-api-access-5lj65\") pod \"cert-manager-cainjector-855d9ccff4-wbdpd\" (UID: \"37ecc18b-2f88-46f4-b925-bf5c7be97cde\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-wbdpd" Jan 26 17:11:49 crc kubenswrapper[4865]: I0126 17:11:49.830605 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/37ecc18b-2f88-46f4-b925-bf5c7be97cde-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-wbdpd\" (UID: \"37ecc18b-2f88-46f4-b925-bf5c7be97cde\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-wbdpd" Jan 26 17:11:49 crc kubenswrapper[4865]: I0126 17:11:49.931508 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/37ecc18b-2f88-46f4-b925-bf5c7be97cde-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-wbdpd\" (UID: 
\"37ecc18b-2f88-46f4-b925-bf5c7be97cde\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-wbdpd" Jan 26 17:11:49 crc kubenswrapper[4865]: I0126 17:11:49.931667 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5lj65\" (UniqueName: \"kubernetes.io/projected/37ecc18b-2f88-46f4-b925-bf5c7be97cde-kube-api-access-5lj65\") pod \"cert-manager-cainjector-855d9ccff4-wbdpd\" (UID: \"37ecc18b-2f88-46f4-b925-bf5c7be97cde\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-wbdpd" Jan 26 17:11:49 crc kubenswrapper[4865]: I0126 17:11:49.951927 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/37ecc18b-2f88-46f4-b925-bf5c7be97cde-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-wbdpd\" (UID: \"37ecc18b-2f88-46f4-b925-bf5c7be97cde\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-wbdpd" Jan 26 17:11:49 crc kubenswrapper[4865]: I0126 17:11:49.952010 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5lj65\" (UniqueName: \"kubernetes.io/projected/37ecc18b-2f88-46f4-b925-bf5c7be97cde-kube-api-access-5lj65\") pod \"cert-manager-cainjector-855d9ccff4-wbdpd\" (UID: \"37ecc18b-2f88-46f4-b925-bf5c7be97cde\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-wbdpd" Jan 26 17:11:50 crc kubenswrapper[4865]: I0126 17:11:50.013304 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-wbdpd" Jan 26 17:11:50 crc kubenswrapper[4865]: I0126 17:11:50.620685 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-wbdpd"] Jan 26 17:11:51 crc kubenswrapper[4865]: I0126 17:11:51.080714 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-wbdpd" event={"ID":"37ecc18b-2f88-46f4-b925-bf5c7be97cde","Type":"ContainerStarted","Data":"7457aad41be22f4673ae64edf207c07505933f9261932864f87e24fbe1e1353b"} Jan 26 17:11:51 crc kubenswrapper[4865]: I0126 17:11:51.630042 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-jbqqg"] Jan 26 17:11:51 crc kubenswrapper[4865]: I0126 17:11:51.631174 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-jbqqg" Jan 26 17:11:51 crc kubenswrapper[4865]: I0126 17:11:51.634032 4865 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-2srl5" Jan 26 17:11:51 crc kubenswrapper[4865]: I0126 17:11:51.640743 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-jbqqg"] Jan 26 17:11:51 crc kubenswrapper[4865]: I0126 17:11:51.757048 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8570c71c-0981-4923-a42f-bf38bcd70b3a-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-jbqqg\" (UID: \"8570c71c-0981-4923-a42f-bf38bcd70b3a\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-jbqqg" Jan 26 17:11:51 crc kubenswrapper[4865]: I0126 17:11:51.757145 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w26wf\" (UniqueName: \"kubernetes.io/projected/8570c71c-0981-4923-a42f-bf38bcd70b3a-kube-api-access-w26wf\") pod \"cert-manager-webhook-f4fb5df64-jbqqg\" (UID: \"8570c71c-0981-4923-a42f-bf38bcd70b3a\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-jbqqg" Jan 26 17:11:51 crc kubenswrapper[4865]: I0126 17:11:51.858385 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8570c71c-0981-4923-a42f-bf38bcd70b3a-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-jbqqg\" (UID: \"8570c71c-0981-4923-a42f-bf38bcd70b3a\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-jbqqg" Jan 26 17:11:51 crc kubenswrapper[4865]: I0126 17:11:51.858477 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w26wf\" (UniqueName: \"kubernetes.io/projected/8570c71c-0981-4923-a42f-bf38bcd70b3a-kube-api-access-w26wf\") pod \"cert-manager-webhook-f4fb5df64-jbqqg\" (UID: \"8570c71c-0981-4923-a42f-bf38bcd70b3a\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-jbqqg" Jan 26 17:11:51 crc kubenswrapper[4865]: I0126 17:11:51.883909 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8570c71c-0981-4923-a42f-bf38bcd70b3a-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-jbqqg\" (UID: \"8570c71c-0981-4923-a42f-bf38bcd70b3a\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-jbqqg" Jan 26 17:11:51 crc kubenswrapper[4865]: I0126 17:11:51.886536 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w26wf\" (UniqueName: \"kubernetes.io/projected/8570c71c-0981-4923-a42f-bf38bcd70b3a-kube-api-access-w26wf\") pod \"cert-manager-webhook-f4fb5df64-jbqqg\" (UID: \"8570c71c-0981-4923-a42f-bf38bcd70b3a\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-jbqqg" Jan 26 17:11:51 crc kubenswrapper[4865]: I0126 17:11:51.948295 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-jbqqg" Jan 26 17:11:52 crc kubenswrapper[4865]: I0126 17:11:52.342188 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-jbqqg"] Jan 26 17:11:53 crc kubenswrapper[4865]: I0126 17:11:53.135430 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-jbqqg" event={"ID":"8570c71c-0981-4923-a42f-bf38bcd70b3a","Type":"ContainerStarted","Data":"a63b0153c47e888fc776a4414a7c61f864de4223caa507cdfc9995953eb2b2cc"} Jan 26 17:12:04 crc kubenswrapper[4865]: I0126 17:12:04.336795 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-wbdpd" event={"ID":"37ecc18b-2f88-46f4-b925-bf5c7be97cde","Type":"ContainerStarted","Data":"88f8385102f3f8ed298a9646e736b026e8dc6371936eea832c8b9ea670768f70"} Jan 26 17:12:04 crc kubenswrapper[4865]: I0126 17:12:04.341125 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-jbqqg" event={"ID":"8570c71c-0981-4923-a42f-bf38bcd70b3a","Type":"ContainerStarted","Data":"a7a2e60436201e80fa493589cc7eb288415088f58df3a3695ced23acfcd628b6"} Jan 26 17:12:04 crc kubenswrapper[4865]: I0126 17:12:04.341743 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-f4fb5df64-jbqqg" Jan 26 17:12:04 crc kubenswrapper[4865]: I0126 17:12:04.358290 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-855d9ccff4-wbdpd" podStartSLOduration=1.912288181 podStartE2EDuration="15.358269139s" podCreationTimestamp="2026-01-26 17:11:49 +0000 UTC" firstStartedPulling="2026-01-26 17:11:50.632710727 +0000 UTC m=+1038.216596314" lastFinishedPulling="2026-01-26 17:12:04.078691675 +0000 UTC m=+1051.662577272" observedRunningTime="2026-01-26 17:12:04.353752381 +0000 UTC m=+1051.937637968" watchObservedRunningTime="2026-01-26 17:12:04.358269139 +0000 UTC m=+1051.942154726" Jan 26 17:12:04 crc kubenswrapper[4865]: I0126 17:12:04.375731 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-f4fb5df64-jbqqg" podStartSLOduration=1.650563274 podStartE2EDuration="13.375708475s" podCreationTimestamp="2026-01-26 17:11:51 +0000 UTC" firstStartedPulling="2026-01-26 17:11:52.359146923 +0000 UTC m=+1039.943032510" lastFinishedPulling="2026-01-26 17:12:04.084292134 +0000 UTC m=+1051.668177711" observedRunningTime="2026-01-26 17:12:04.375507819 +0000 UTC m=+1051.959393406" watchObservedRunningTime="2026-01-26 17:12:04.375708475 +0000 UTC m=+1051.959594052" Jan 26 17:12:05 crc kubenswrapper[4865]: I0126 17:12:05.157083 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-86cb77c54b-wgsfr"] Jan 26 17:12:05 crc kubenswrapper[4865]: I0126 17:12:05.158098 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-wgsfr" Jan 26 17:12:05 crc kubenswrapper[4865]: I0126 17:12:05.162863 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-wgsfr"] Jan 26 17:12:05 crc kubenswrapper[4865]: I0126 17:12:05.164879 4865 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-2hcrv" Jan 26 17:12:05 crc kubenswrapper[4865]: I0126 17:12:05.295369 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vt5v9\" (UniqueName: \"kubernetes.io/projected/dabe3725-c1fe-4073-9054-b372955d4b6b-kube-api-access-vt5v9\") pod \"cert-manager-86cb77c54b-wgsfr\" (UID: \"dabe3725-c1fe-4073-9054-b372955d4b6b\") " pod="cert-manager/cert-manager-86cb77c54b-wgsfr" Jan 26 17:12:05 crc kubenswrapper[4865]: I0126 17:12:05.295435 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/dabe3725-c1fe-4073-9054-b372955d4b6b-bound-sa-token\") pod \"cert-manager-86cb77c54b-wgsfr\" (UID: \"dabe3725-c1fe-4073-9054-b372955d4b6b\") " pod="cert-manager/cert-manager-86cb77c54b-wgsfr" Jan 26 17:12:05 crc kubenswrapper[4865]: I0126 17:12:05.396911 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vt5v9\" (UniqueName: \"kubernetes.io/projected/dabe3725-c1fe-4073-9054-b372955d4b6b-kube-api-access-vt5v9\") pod \"cert-manager-86cb77c54b-wgsfr\" (UID: \"dabe3725-c1fe-4073-9054-b372955d4b6b\") " pod="cert-manager/cert-manager-86cb77c54b-wgsfr" Jan 26 17:12:05 crc kubenswrapper[4865]: I0126 17:12:05.397002 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/dabe3725-c1fe-4073-9054-b372955d4b6b-bound-sa-token\") pod \"cert-manager-86cb77c54b-wgsfr\" (UID: \"dabe3725-c1fe-4073-9054-b372955d4b6b\") " pod="cert-manager/cert-manager-86cb77c54b-wgsfr" Jan 26 17:12:05 crc kubenswrapper[4865]: I0126 17:12:05.439532 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/dabe3725-c1fe-4073-9054-b372955d4b6b-bound-sa-token\") pod \"cert-manager-86cb77c54b-wgsfr\" (UID: \"dabe3725-c1fe-4073-9054-b372955d4b6b\") " pod="cert-manager/cert-manager-86cb77c54b-wgsfr" Jan 26 17:12:05 crc kubenswrapper[4865]: I0126 17:12:05.439605 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vt5v9\" (UniqueName: \"kubernetes.io/projected/dabe3725-c1fe-4073-9054-b372955d4b6b-kube-api-access-vt5v9\") pod \"cert-manager-86cb77c54b-wgsfr\" (UID: \"dabe3725-c1fe-4073-9054-b372955d4b6b\") " pod="cert-manager/cert-manager-86cb77c54b-wgsfr" Jan 26 17:12:05 crc kubenswrapper[4865]: I0126 17:12:05.475102 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-wgsfr" Jan 26 17:12:05 crc kubenswrapper[4865]: I0126 17:12:05.916837 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-wgsfr"] Jan 26 17:12:06 crc kubenswrapper[4865]: I0126 17:12:06.354775 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-wgsfr" event={"ID":"dabe3725-c1fe-4073-9054-b372955d4b6b","Type":"ContainerStarted","Data":"87ff045683137ac76cfb56bd07f0f5cd75e4f7386599437bb07846cc2565390b"} Jan 26 17:12:06 crc kubenswrapper[4865]: I0126 17:12:06.355199 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-wgsfr" event={"ID":"dabe3725-c1fe-4073-9054-b372955d4b6b","Type":"ContainerStarted","Data":"fa7e5d67205cca7ed50239cf610ca2259cf2083334d01514b25db01fd40190a8"} Jan 26 17:12:06 crc kubenswrapper[4865]: I0126 17:12:06.373718 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-86cb77c54b-wgsfr" podStartSLOduration=1.3736896760000001 podStartE2EDuration="1.373689676s" podCreationTimestamp="2026-01-26 17:12:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 17:12:06.370590058 +0000 UTC m=+1053.954475685" watchObservedRunningTime="2026-01-26 17:12:06.373689676 +0000 UTC m=+1053.957575283" Jan 26 17:12:11 crc kubenswrapper[4865]: I0126 17:12:11.952172 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-f4fb5df64-jbqqg" Jan 26 17:12:15 crc kubenswrapper[4865]: I0126 17:12:15.163557 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-rkj7s"] Jan 26 17:12:15 crc kubenswrapper[4865]: I0126 17:12:15.164849 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-rkj7s" Jan 26 17:12:15 crc kubenswrapper[4865]: W0126 17:12:15.167516 4865 reflector.go:561] object-"openstack-operators"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openstack-operators": no relationship found between node 'crc' and this object Jan 26 17:12:15 crc kubenswrapper[4865]: E0126 17:12:15.167575 4865 reflector.go:158] "Unhandled Error" err="object-\"openstack-operators\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openstack-operators\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 26 17:12:15 crc kubenswrapper[4865]: W0126 17:12:15.167676 4865 reflector.go:561] object-"openstack-operators"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openstack-operators": no relationship found between node 'crc' and this object Jan 26 17:12:15 crc kubenswrapper[4865]: E0126 17:12:15.167690 4865 reflector.go:158] "Unhandled Error" err="object-\"openstack-operators\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openstack-operators\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 26 17:12:15 crc kubenswrapper[4865]: W0126 17:12:15.169802 4865 reflector.go:561] object-"openstack-operators"/"openstack-operator-index-dockercfg-gchm4": failed to list *v1.Secret: secrets "openstack-operator-index-dockercfg-gchm4" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openstack-operators": no relationship found between node 'crc' and this object Jan 26 17:12:15 crc kubenswrapper[4865]: E0126 17:12:15.169830 4865 reflector.go:158] "Unhandled Error" err="object-\"openstack-operators\"/\"openstack-operator-index-dockercfg-gchm4\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"openstack-operator-index-dockercfg-gchm4\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openstack-operators\": no relationship found between node 'crc' and this object" logger="UnhandledError" Jan 26 17:12:15 crc kubenswrapper[4865]: I0126 17:12:15.189456 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-rkj7s"] Jan 26 17:12:15 crc kubenswrapper[4865]: I0126 17:12:15.237041 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-csg4b\" (UniqueName: \"kubernetes.io/projected/a9060030-6f18-4f78-997e-0cf23fe670e8-kube-api-access-csg4b\") pod \"openstack-operator-index-rkj7s\" (UID: \"a9060030-6f18-4f78-997e-0cf23fe670e8\") " pod="openstack-operators/openstack-operator-index-rkj7s" Jan 26 17:12:15 crc kubenswrapper[4865]: I0126 17:12:15.338217 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-csg4b\" (UniqueName: 
\"kubernetes.io/projected/a9060030-6f18-4f78-997e-0cf23fe670e8-kube-api-access-csg4b\") pod \"openstack-operator-index-rkj7s\" (UID: \"a9060030-6f18-4f78-997e-0cf23fe670e8\") " pod="openstack-operators/openstack-operator-index-rkj7s" Jan 26 17:12:16 crc kubenswrapper[4865]: I0126 17:12:16.192309 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Jan 26 17:12:16 crc kubenswrapper[4865]: E0126 17:12:16.358881 4865 projected.go:288] Couldn't get configMap openstack-operators/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition Jan 26 17:12:16 crc kubenswrapper[4865]: E0126 17:12:16.358956 4865 projected.go:194] Error preparing data for projected volume kube-api-access-csg4b for pod openstack-operators/openstack-operator-index-rkj7s: failed to sync configmap cache: timed out waiting for the condition Jan 26 17:12:16 crc kubenswrapper[4865]: E0126 17:12:16.359065 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/a9060030-6f18-4f78-997e-0cf23fe670e8-kube-api-access-csg4b podName:a9060030-6f18-4f78-997e-0cf23fe670e8 nodeName:}" failed. No retries permitted until 2026-01-26 17:12:16.859028028 +0000 UTC m=+1064.442913625 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-csg4b" (UniqueName: "kubernetes.io/projected/a9060030-6f18-4f78-997e-0cf23fe670e8-kube-api-access-csg4b") pod "openstack-operator-index-rkj7s" (UID: "a9060030-6f18-4f78-997e-0cf23fe670e8") : failed to sync configmap cache: timed out waiting for the condition Jan 26 17:12:16 crc kubenswrapper[4865]: I0126 17:12:16.419672 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-gchm4" Jan 26 17:12:16 crc kubenswrapper[4865]: I0126 17:12:16.639497 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Jan 26 17:12:16 crc kubenswrapper[4865]: I0126 17:12:16.859960 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-csg4b\" (UniqueName: \"kubernetes.io/projected/a9060030-6f18-4f78-997e-0cf23fe670e8-kube-api-access-csg4b\") pod \"openstack-operator-index-rkj7s\" (UID: \"a9060030-6f18-4f78-997e-0cf23fe670e8\") " pod="openstack-operators/openstack-operator-index-rkj7s" Jan 26 17:12:16 crc kubenswrapper[4865]: I0126 17:12:16.866697 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-csg4b\" (UniqueName: \"kubernetes.io/projected/a9060030-6f18-4f78-997e-0cf23fe670e8-kube-api-access-csg4b\") pod \"openstack-operator-index-rkj7s\" (UID: \"a9060030-6f18-4f78-997e-0cf23fe670e8\") " pod="openstack-operators/openstack-operator-index-rkj7s" Jan 26 17:12:16 crc kubenswrapper[4865]: I0126 17:12:16.984823 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-rkj7s" Jan 26 17:12:17 crc kubenswrapper[4865]: I0126 17:12:17.341353 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-rkj7s"] Jan 26 17:12:17 crc kubenswrapper[4865]: I0126 17:12:17.454453 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-rkj7s"] Jan 26 17:12:17 crc kubenswrapper[4865]: I0126 17:12:17.749393 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-25lgf"] Jan 26 17:12:17 crc kubenswrapper[4865]: I0126 17:12:17.751016 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-25lgf" Jan 26 17:12:17 crc kubenswrapper[4865]: I0126 17:12:17.759241 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-25lgf"] Jan 26 17:12:17 crc kubenswrapper[4865]: I0126 17:12:17.875086 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6vskd\" (UniqueName: \"kubernetes.io/projected/4b92d2ae-9860-4398-9efa-b97f65dd6136-kube-api-access-6vskd\") pod \"openstack-operator-index-25lgf\" (UID: \"4b92d2ae-9860-4398-9efa-b97f65dd6136\") " pod="openstack-operators/openstack-operator-index-25lgf" Jan 26 17:12:17 crc kubenswrapper[4865]: I0126 17:12:17.976380 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6vskd\" (UniqueName: \"kubernetes.io/projected/4b92d2ae-9860-4398-9efa-b97f65dd6136-kube-api-access-6vskd\") pod \"openstack-operator-index-25lgf\" (UID: \"4b92d2ae-9860-4398-9efa-b97f65dd6136\") " pod="openstack-operators/openstack-operator-index-25lgf" Jan 26 17:12:18 crc kubenswrapper[4865]: I0126 17:12:18.004826 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6vskd\" (UniqueName: \"kubernetes.io/projected/4b92d2ae-9860-4398-9efa-b97f65dd6136-kube-api-access-6vskd\") pod \"openstack-operator-index-25lgf\" (UID: \"4b92d2ae-9860-4398-9efa-b97f65dd6136\") " pod="openstack-operators/openstack-operator-index-25lgf" Jan 26 17:12:18 crc kubenswrapper[4865]: I0126 17:12:18.078539 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-25lgf" Jan 26 17:12:18 crc kubenswrapper[4865]: I0126 17:12:18.443820 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-rkj7s" event={"ID":"a9060030-6f18-4f78-997e-0cf23fe670e8","Type":"ContainerStarted","Data":"c7157f59540914d44af5a6c11fdff43a3a8177ca06d45078ad8d99ff02be878d"} Jan 26 17:12:19 crc kubenswrapper[4865]: I0126 17:12:19.049398 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-25lgf"] Jan 26 17:12:19 crc kubenswrapper[4865]: W0126 17:12:19.313365 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4b92d2ae_9860_4398_9efa_b97f65dd6136.slice/crio-f7ab6b1c3caef337076258c8b45913d173bf42de7740f2bb8a6da065e873ff59 WatchSource:0}: Error finding container f7ab6b1c3caef337076258c8b45913d173bf42de7740f2bb8a6da065e873ff59: Status 404 returned error can't find the container with id f7ab6b1c3caef337076258c8b45913d173bf42de7740f2bb8a6da065e873ff59 Jan 26 17:12:19 crc kubenswrapper[4865]: I0126 17:12:19.451793 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-25lgf" event={"ID":"4b92d2ae-9860-4398-9efa-b97f65dd6136","Type":"ContainerStarted","Data":"f7ab6b1c3caef337076258c8b45913d173bf42de7740f2bb8a6da065e873ff59"} Jan 26 17:12:20 crc kubenswrapper[4865]: I0126 17:12:20.460694 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-rkj7s" podUID="a9060030-6f18-4f78-997e-0cf23fe670e8" containerName="registry-server" containerID="cri-o://dadca7cd37a6fdf3948416f983b1e6003aae606320b8a0b9949826c7b08cce0e" gracePeriod=2 Jan 26 17:12:20 crc kubenswrapper[4865]: I0126 17:12:20.460967 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-rkj7s" event={"ID":"a9060030-6f18-4f78-997e-0cf23fe670e8","Type":"ContainerStarted","Data":"dadca7cd37a6fdf3948416f983b1e6003aae606320b8a0b9949826c7b08cce0e"} Jan 26 17:12:20 crc kubenswrapper[4865]: I0126 17:12:20.465699 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-25lgf" event={"ID":"4b92d2ae-9860-4398-9efa-b97f65dd6136","Type":"ContainerStarted","Data":"a1f82483945d6256b2ce05fd5ddafda70fcbff786049ca7d0c42aa34287876aa"} Jan 26 17:12:20 crc kubenswrapper[4865]: I0126 17:12:20.481900 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-rkj7s" podStartSLOduration=2.944509201 podStartE2EDuration="5.48186903s" podCreationTimestamp="2026-01-26 17:12:15 +0000 UTC" firstStartedPulling="2026-01-26 17:12:17.464520642 +0000 UTC m=+1065.048406229" lastFinishedPulling="2026-01-26 17:12:20.001880471 +0000 UTC m=+1067.585766058" observedRunningTime="2026-01-26 17:12:20.478774372 +0000 UTC m=+1068.062659959" watchObservedRunningTime="2026-01-26 17:12:20.48186903 +0000 UTC m=+1068.065754617" Jan 26 17:12:20 crc kubenswrapper[4865]: I0126 17:12:20.499821 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-25lgf" podStartSLOduration=2.822890275 podStartE2EDuration="3.499800199s" podCreationTimestamp="2026-01-26 17:12:17 +0000 UTC" firstStartedPulling="2026-01-26 17:12:19.316811495 +0000 UTC m=+1066.900697082" lastFinishedPulling="2026-01-26 
Jan 26 17:12:20 crc kubenswrapper[4865]: I0126 17:12:20.499821 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-25lgf" podStartSLOduration=2.822890275 podStartE2EDuration="3.499800199s" podCreationTimestamp="2026-01-26 17:12:17 +0000 UTC" firstStartedPulling="2026-01-26 17:12:19.316811495 +0000 UTC m=+1066.900697082" lastFinishedPulling="2026-01-26 17:12:19.993721419 +0000 UTC m=+1067.577607006" observedRunningTime="2026-01-26 17:12:20.496354961 +0000 UTC m=+1068.080240568" watchObservedRunningTime="2026-01-26 17:12:20.499800199 +0000 UTC m=+1068.083685776"
Jan 26 17:12:20 crc kubenswrapper[4865]: I0126 17:12:20.867742 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-rkj7s"
Jan 26 17:12:20 crc kubenswrapper[4865]: I0126 17:12:20.997971 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-csg4b\" (UniqueName: \"kubernetes.io/projected/a9060030-6f18-4f78-997e-0cf23fe670e8-kube-api-access-csg4b\") pod \"a9060030-6f18-4f78-997e-0cf23fe670e8\" (UID: \"a9060030-6f18-4f78-997e-0cf23fe670e8\") "
Jan 26 17:12:21 crc kubenswrapper[4865]: I0126 17:12:21.005451 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9060030-6f18-4f78-997e-0cf23fe670e8-kube-api-access-csg4b" (OuterVolumeSpecName: "kube-api-access-csg4b") pod "a9060030-6f18-4f78-997e-0cf23fe670e8" (UID: "a9060030-6f18-4f78-997e-0cf23fe670e8"). InnerVolumeSpecName "kube-api-access-csg4b". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 17:12:21 crc kubenswrapper[4865]: I0126 17:12:21.099399 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-csg4b\" (UniqueName: \"kubernetes.io/projected/a9060030-6f18-4f78-997e-0cf23fe670e8-kube-api-access-csg4b\") on node \"crc\" DevicePath \"\""
Jan 26 17:12:21 crc kubenswrapper[4865]: I0126 17:12:21.475718 4865 generic.go:334] "Generic (PLEG): container finished" podID="a9060030-6f18-4f78-997e-0cf23fe670e8" containerID="dadca7cd37a6fdf3948416f983b1e6003aae606320b8a0b9949826c7b08cce0e" exitCode=0
Jan 26 17:12:21 crc kubenswrapper[4865]: I0126 17:12:21.475787 4865 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-rkj7s" Jan 26 17:12:21 crc kubenswrapper[4865]: I0126 17:12:21.475811 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-rkj7s" event={"ID":"a9060030-6f18-4f78-997e-0cf23fe670e8","Type":"ContainerDied","Data":"dadca7cd37a6fdf3948416f983b1e6003aae606320b8a0b9949826c7b08cce0e"} Jan 26 17:12:21 crc kubenswrapper[4865]: I0126 17:12:21.475893 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-rkj7s" event={"ID":"a9060030-6f18-4f78-997e-0cf23fe670e8","Type":"ContainerDied","Data":"c7157f59540914d44af5a6c11fdff43a3a8177ca06d45078ad8d99ff02be878d"} Jan 26 17:12:21 crc kubenswrapper[4865]: I0126 17:12:21.475921 4865 scope.go:117] "RemoveContainer" containerID="dadca7cd37a6fdf3948416f983b1e6003aae606320b8a0b9949826c7b08cce0e" Jan 26 17:12:21 crc kubenswrapper[4865]: I0126 17:12:21.506112 4865 scope.go:117] "RemoveContainer" containerID="dadca7cd37a6fdf3948416f983b1e6003aae606320b8a0b9949826c7b08cce0e" Jan 26 17:12:21 crc kubenswrapper[4865]: E0126 17:12:21.506691 4865 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dadca7cd37a6fdf3948416f983b1e6003aae606320b8a0b9949826c7b08cce0e\": container with ID starting with dadca7cd37a6fdf3948416f983b1e6003aae606320b8a0b9949826c7b08cce0e not found: ID does not exist" containerID="dadca7cd37a6fdf3948416f983b1e6003aae606320b8a0b9949826c7b08cce0e" Jan 26 17:12:21 crc kubenswrapper[4865]: I0126 17:12:21.506754 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dadca7cd37a6fdf3948416f983b1e6003aae606320b8a0b9949826c7b08cce0e"} err="failed to get container status \"dadca7cd37a6fdf3948416f983b1e6003aae606320b8a0b9949826c7b08cce0e\": rpc error: code = NotFound desc = could not find container \"dadca7cd37a6fdf3948416f983b1e6003aae606320b8a0b9949826c7b08cce0e\": container with ID starting with dadca7cd37a6fdf3948416f983b1e6003aae606320b8a0b9949826c7b08cce0e not found: ID does not exist" Jan 26 17:12:21 crc kubenswrapper[4865]: I0126 17:12:21.507264 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-rkj7s"] Jan 26 17:12:21 crc kubenswrapper[4865]: I0126 17:12:21.511652 4865 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-rkj7s"] Jan 26 17:12:22 crc kubenswrapper[4865]: I0126 17:12:22.368336 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9060030-6f18-4f78-997e-0cf23fe670e8" path="/var/lib/kubelet/pods/a9060030-6f18-4f78-997e-0cf23fe670e8/volumes" Jan 26 17:12:28 crc kubenswrapper[4865]: I0126 17:12:28.078961 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-25lgf" Jan 26 17:12:28 crc kubenswrapper[4865]: I0126 17:12:28.079568 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-25lgf" Jan 26 17:12:28 crc kubenswrapper[4865]: I0126 17:12:28.112733 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-25lgf" Jan 26 17:12:28 crc kubenswrapper[4865]: I0126 17:12:28.561787 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-25lgf" Jan 26 17:12:34 crc 
kubenswrapper[4865]: I0126 17:12:34.778959 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/a2e6715c52e9904d8b44b00a99f99bc81a64b1fc7d62ec0e717535fb0b7dn8z"] Jan 26 17:12:34 crc kubenswrapper[4865]: E0126 17:12:34.779854 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9060030-6f18-4f78-997e-0cf23fe670e8" containerName="registry-server" Jan 26 17:12:34 crc kubenswrapper[4865]: I0126 17:12:34.779872 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9060030-6f18-4f78-997e-0cf23fe670e8" containerName="registry-server" Jan 26 17:12:34 crc kubenswrapper[4865]: I0126 17:12:34.780038 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9060030-6f18-4f78-997e-0cf23fe670e8" containerName="registry-server" Jan 26 17:12:34 crc kubenswrapper[4865]: I0126 17:12:34.781026 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/a2e6715c52e9904d8b44b00a99f99bc81a64b1fc7d62ec0e717535fb0b7dn8z" Jan 26 17:12:34 crc kubenswrapper[4865]: I0126 17:12:34.782941 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-8tnql" Jan 26 17:12:34 crc kubenswrapper[4865]: I0126 17:12:34.788275 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/a2e6715c52e9904d8b44b00a99f99bc81a64b1fc7d62ec0e717535fb0b7dn8z"] Jan 26 17:12:34 crc kubenswrapper[4865]: I0126 17:12:34.926843 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac-util\") pod \"a2e6715c52e9904d8b44b00a99f99bc81a64b1fc7d62ec0e717535fb0b7dn8z\" (UID: \"5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac\") " pod="openstack-operators/a2e6715c52e9904d8b44b00a99f99bc81a64b1fc7d62ec0e717535fb0b7dn8z" Jan 26 17:12:34 crc kubenswrapper[4865]: I0126 17:12:34.927257 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lbxgh\" (UniqueName: \"kubernetes.io/projected/5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac-kube-api-access-lbxgh\") pod \"a2e6715c52e9904d8b44b00a99f99bc81a64b1fc7d62ec0e717535fb0b7dn8z\" (UID: \"5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac\") " pod="openstack-operators/a2e6715c52e9904d8b44b00a99f99bc81a64b1fc7d62ec0e717535fb0b7dn8z" Jan 26 17:12:34 crc kubenswrapper[4865]: I0126 17:12:34.927399 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac-bundle\") pod \"a2e6715c52e9904d8b44b00a99f99bc81a64b1fc7d62ec0e717535fb0b7dn8z\" (UID: \"5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac\") " pod="openstack-operators/a2e6715c52e9904d8b44b00a99f99bc81a64b1fc7d62ec0e717535fb0b7dn8z" Jan 26 17:12:35 crc kubenswrapper[4865]: I0126 17:12:35.028229 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac-util\") pod \"a2e6715c52e9904d8b44b00a99f99bc81a64b1fc7d62ec0e717535fb0b7dn8z\" (UID: \"5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac\") " pod="openstack-operators/a2e6715c52e9904d8b44b00a99f99bc81a64b1fc7d62ec0e717535fb0b7dn8z" Jan 26 17:12:35 crc kubenswrapper[4865]: I0126 17:12:35.028281 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lbxgh\" (UniqueName: 
\"kubernetes.io/projected/5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac-kube-api-access-lbxgh\") pod \"a2e6715c52e9904d8b44b00a99f99bc81a64b1fc7d62ec0e717535fb0b7dn8z\" (UID: \"5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac\") " pod="openstack-operators/a2e6715c52e9904d8b44b00a99f99bc81a64b1fc7d62ec0e717535fb0b7dn8z" Jan 26 17:12:35 crc kubenswrapper[4865]: I0126 17:12:35.028318 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac-bundle\") pod \"a2e6715c52e9904d8b44b00a99f99bc81a64b1fc7d62ec0e717535fb0b7dn8z\" (UID: \"5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac\") " pod="openstack-operators/a2e6715c52e9904d8b44b00a99f99bc81a64b1fc7d62ec0e717535fb0b7dn8z" Jan 26 17:12:35 crc kubenswrapper[4865]: I0126 17:12:35.029238 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac-bundle\") pod \"a2e6715c52e9904d8b44b00a99f99bc81a64b1fc7d62ec0e717535fb0b7dn8z\" (UID: \"5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac\") " pod="openstack-operators/a2e6715c52e9904d8b44b00a99f99bc81a64b1fc7d62ec0e717535fb0b7dn8z" Jan 26 17:12:35 crc kubenswrapper[4865]: I0126 17:12:35.029575 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac-util\") pod \"a2e6715c52e9904d8b44b00a99f99bc81a64b1fc7d62ec0e717535fb0b7dn8z\" (UID: \"5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac\") " pod="openstack-operators/a2e6715c52e9904d8b44b00a99f99bc81a64b1fc7d62ec0e717535fb0b7dn8z" Jan 26 17:12:35 crc kubenswrapper[4865]: I0126 17:12:35.050206 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lbxgh\" (UniqueName: \"kubernetes.io/projected/5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac-kube-api-access-lbxgh\") pod \"a2e6715c52e9904d8b44b00a99f99bc81a64b1fc7d62ec0e717535fb0b7dn8z\" (UID: \"5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac\") " pod="openstack-operators/a2e6715c52e9904d8b44b00a99f99bc81a64b1fc7d62ec0e717535fb0b7dn8z" Jan 26 17:12:35 crc kubenswrapper[4865]: I0126 17:12:35.100301 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/a2e6715c52e9904d8b44b00a99f99bc81a64b1fc7d62ec0e717535fb0b7dn8z" Jan 26 17:12:35 crc kubenswrapper[4865]: I0126 17:12:35.515868 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/a2e6715c52e9904d8b44b00a99f99bc81a64b1fc7d62ec0e717535fb0b7dn8z"] Jan 26 17:12:35 crc kubenswrapper[4865]: W0126 17:12:35.519290 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5b8d0e19_e4c9_44ec_aef9_4e001f21a0ac.slice/crio-7e5dc0838fe6623bdf387be13f450c032058c92ba1cf5803af6611ef40b701f4 WatchSource:0}: Error finding container 7e5dc0838fe6623bdf387be13f450c032058c92ba1cf5803af6611ef40b701f4: Status 404 returned error can't find the container with id 7e5dc0838fe6623bdf387be13f450c032058c92ba1cf5803af6611ef40b701f4 Jan 26 17:12:35 crc kubenswrapper[4865]: I0126 17:12:35.587562 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/a2e6715c52e9904d8b44b00a99f99bc81a64b1fc7d62ec0e717535fb0b7dn8z" event={"ID":"5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac","Type":"ContainerStarted","Data":"7e5dc0838fe6623bdf387be13f450c032058c92ba1cf5803af6611ef40b701f4"} Jan 26 17:12:36 crc kubenswrapper[4865]: I0126 17:12:36.597148 4865 generic.go:334] "Generic (PLEG): container finished" podID="5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac" containerID="f11819d7d0e6b48bc511b82225d272f789116dcfb4b11dc4dae41ddb8d12fc12" exitCode=0 Jan 26 17:12:36 crc kubenswrapper[4865]: I0126 17:12:36.597248 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/a2e6715c52e9904d8b44b00a99f99bc81a64b1fc7d62ec0e717535fb0b7dn8z" event={"ID":"5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac","Type":"ContainerDied","Data":"f11819d7d0e6b48bc511b82225d272f789116dcfb4b11dc4dae41ddb8d12fc12"} Jan 26 17:12:37 crc kubenswrapper[4865]: I0126 17:12:37.611855 4865 generic.go:334] "Generic (PLEG): container finished" podID="5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac" containerID="41a31ce9041ade01906548a238128048e34c83dd4c73874643561628be97572d" exitCode=0 Jan 26 17:12:37 crc kubenswrapper[4865]: I0126 17:12:37.611969 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/a2e6715c52e9904d8b44b00a99f99bc81a64b1fc7d62ec0e717535fb0b7dn8z" event={"ID":"5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac","Type":"ContainerDied","Data":"41a31ce9041ade01906548a238128048e34c83dd4c73874643561628be97572d"} Jan 26 17:12:38 crc kubenswrapper[4865]: I0126 17:12:38.621790 4865 generic.go:334] "Generic (PLEG): container finished" podID="5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac" containerID="ddcc8089eb57e73a1cad3653cf249a25f06eaa9cf8c6670932d74674c79fe23a" exitCode=0 Jan 26 17:12:38 crc kubenswrapper[4865]: I0126 17:12:38.621901 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/a2e6715c52e9904d8b44b00a99f99bc81a64b1fc7d62ec0e717535fb0b7dn8z" event={"ID":"5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac","Type":"ContainerDied","Data":"ddcc8089eb57e73a1cad3653cf249a25f06eaa9cf8c6670932d74674c79fe23a"} Jan 26 17:12:39 crc kubenswrapper[4865]: I0126 17:12:39.890544 4865 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/a2e6715c52e9904d8b44b00a99f99bc81a64b1fc7d62ec0e717535fb0b7dn8z" Jan 26 17:12:39 crc kubenswrapper[4865]: I0126 17:12:39.949117 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lbxgh\" (UniqueName: \"kubernetes.io/projected/5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac-kube-api-access-lbxgh\") pod \"5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac\" (UID: \"5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac\") " Jan 26 17:12:39 crc kubenswrapper[4865]: I0126 17:12:39.949200 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac-bundle\") pod \"5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac\" (UID: \"5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac\") " Jan 26 17:12:39 crc kubenswrapper[4865]: I0126 17:12:39.949311 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac-util\") pod \"5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac\" (UID: \"5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac\") " Jan 26 17:12:39 crc kubenswrapper[4865]: I0126 17:12:39.950178 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac-bundle" (OuterVolumeSpecName: "bundle") pod "5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac" (UID: "5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 17:12:39 crc kubenswrapper[4865]: I0126 17:12:39.958427 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac-kube-api-access-lbxgh" (OuterVolumeSpecName: "kube-api-access-lbxgh") pod "5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac" (UID: "5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac"). InnerVolumeSpecName "kube-api-access-lbxgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 17:12:39 crc kubenswrapper[4865]: I0126 17:12:39.976822 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac-util" (OuterVolumeSpecName: "util") pod "5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac" (UID: "5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 17:12:40 crc kubenswrapper[4865]: I0126 17:12:40.050917 4865 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac-bundle\") on node \"crc\" DevicePath \"\"" Jan 26 17:12:40 crc kubenswrapper[4865]: I0126 17:12:40.050972 4865 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac-util\") on node \"crc\" DevicePath \"\"" Jan 26 17:12:40 crc kubenswrapper[4865]: I0126 17:12:40.050984 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lbxgh\" (UniqueName: \"kubernetes.io/projected/5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac-kube-api-access-lbxgh\") on node \"crc\" DevicePath \"\"" Jan 26 17:12:40 crc kubenswrapper[4865]: I0126 17:12:40.637268 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/a2e6715c52e9904d8b44b00a99f99bc81a64b1fc7d62ec0e717535fb0b7dn8z" event={"ID":"5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac","Type":"ContainerDied","Data":"7e5dc0838fe6623bdf387be13f450c032058c92ba1cf5803af6611ef40b701f4"} Jan 26 17:12:40 crc kubenswrapper[4865]: I0126 17:12:40.637658 4865 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7e5dc0838fe6623bdf387be13f450c032058c92ba1cf5803af6611ef40b701f4" Jan 26 17:12:40 crc kubenswrapper[4865]: I0126 17:12:40.637364 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/a2e6715c52e9904d8b44b00a99f99bc81a64b1fc7d62ec0e717535fb0b7dn8z" Jan 26 17:12:46 crc kubenswrapper[4865]: I0126 17:12:46.196190 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-init-bb665b4d9-6lbn8"] Jan 26 17:12:46 crc kubenswrapper[4865]: E0126 17:12:46.197200 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac" containerName="pull" Jan 26 17:12:46 crc kubenswrapper[4865]: I0126 17:12:46.197217 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac" containerName="pull" Jan 26 17:12:46 crc kubenswrapper[4865]: E0126 17:12:46.197233 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac" containerName="util" Jan 26 17:12:46 crc kubenswrapper[4865]: I0126 17:12:46.197240 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac" containerName="util" Jan 26 17:12:46 crc kubenswrapper[4865]: E0126 17:12:46.197250 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac" containerName="extract" Jan 26 17:12:46 crc kubenswrapper[4865]: I0126 17:12:46.197259 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac" containerName="extract" Jan 26 17:12:46 crc kubenswrapper[4865]: I0126 17:12:46.197414 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac" containerName="extract" Jan 26 17:12:46 crc kubenswrapper[4865]: I0126 17:12:46.198019 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-bb665b4d9-6lbn8" Jan 26 17:12:46 crc kubenswrapper[4865]: I0126 17:12:46.203423 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-init-dockercfg-8m4fh" Jan 26 17:12:46 crc kubenswrapper[4865]: I0126 17:12:46.226907 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-init-bb665b4d9-6lbn8"] Jan 26 17:12:46 crc kubenswrapper[4865]: I0126 17:12:46.344549 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-czzs6\" (UniqueName: \"kubernetes.io/projected/206b70b2-3963-4d20-9bff-3ce16c46ef86-kube-api-access-czzs6\") pod \"openstack-operator-controller-init-bb665b4d9-6lbn8\" (UID: \"206b70b2-3963-4d20-9bff-3ce16c46ef86\") " pod="openstack-operators/openstack-operator-controller-init-bb665b4d9-6lbn8" Jan 26 17:12:46 crc kubenswrapper[4865]: I0126 17:12:46.446496 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-czzs6\" (UniqueName: \"kubernetes.io/projected/206b70b2-3963-4d20-9bff-3ce16c46ef86-kube-api-access-czzs6\") pod \"openstack-operator-controller-init-bb665b4d9-6lbn8\" (UID: \"206b70b2-3963-4d20-9bff-3ce16c46ef86\") " pod="openstack-operators/openstack-operator-controller-init-bb665b4d9-6lbn8" Jan 26 17:12:46 crc kubenswrapper[4865]: I0126 17:12:46.469845 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-czzs6\" (UniqueName: \"kubernetes.io/projected/206b70b2-3963-4d20-9bff-3ce16c46ef86-kube-api-access-czzs6\") pod \"openstack-operator-controller-init-bb665b4d9-6lbn8\" (UID: \"206b70b2-3963-4d20-9bff-3ce16c46ef86\") " pod="openstack-operators/openstack-operator-controller-init-bb665b4d9-6lbn8" Jan 26 17:12:46 crc kubenswrapper[4865]: I0126 17:12:46.516409 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-bb665b4d9-6lbn8" Jan 26 17:12:46 crc kubenswrapper[4865]: I0126 17:12:46.787543 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-init-bb665b4d9-6lbn8"] Jan 26 17:12:47 crc kubenswrapper[4865]: I0126 17:12:47.695051 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-bb665b4d9-6lbn8" event={"ID":"206b70b2-3963-4d20-9bff-3ce16c46ef86","Type":"ContainerStarted","Data":"206c9f39748ddde645376537f41eded75ebc38af1952128c89db86fc8a681943"} Jan 26 17:12:54 crc kubenswrapper[4865]: I0126 17:12:54.761122 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-bb665b4d9-6lbn8" event={"ID":"206b70b2-3963-4d20-9bff-3ce16c46ef86","Type":"ContainerStarted","Data":"652e0ddbdcba627df82fdb5ec20e0b3a0458a70167c4e63f650d30def45b2a8b"} Jan 26 17:12:54 crc kubenswrapper[4865]: I0126 17:12:54.761750 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-init-bb665b4d9-6lbn8" Jan 26 17:12:54 crc kubenswrapper[4865]: I0126 17:12:54.803769 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-init-bb665b4d9-6lbn8" podStartSLOduration=1.308024442 podStartE2EDuration="8.803748232s" podCreationTimestamp="2026-01-26 17:12:46 +0000 UTC" firstStartedPulling="2026-01-26 17:12:46.809442454 +0000 UTC m=+1094.393328041" lastFinishedPulling="2026-01-26 17:12:54.305166244 +0000 UTC m=+1101.889051831" observedRunningTime="2026-01-26 17:12:54.800684645 +0000 UTC m=+1102.384570252" watchObservedRunningTime="2026-01-26 17:12:54.803748232 +0000 UTC m=+1102.387633819" Jan 26 17:13:06 crc kubenswrapper[4865]: I0126 17:13:06.519588 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-init-bb665b4d9-6lbn8" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.463760 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7f86f8796f-bkj4l"] Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.466342 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7f86f8796f-bkj4l" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.469096 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-dgrtv" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.469248 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-7478f7dbf9-8cwj6"] Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.470449 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-7478f7dbf9-8cwj6" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.480453 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-b45d7bf98-l5z5s"] Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.481211 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-gksx7" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.481670 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-l5z5s" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.484578 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-c9rq7" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.494315 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-7478f7dbf9-8cwj6"] Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.502167 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7f86f8796f-bkj4l"] Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.522667 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9p2ss\" (UniqueName: \"kubernetes.io/projected/e4eb0e9f-c220-4e50-9a7e-89da7fe708d9-kube-api-access-9p2ss\") pod \"barbican-operator-controller-manager-7f86f8796f-bkj4l\" (UID: \"e4eb0e9f-c220-4e50-9a7e-89da7fe708d9\") " pod="openstack-operators/barbican-operator-controller-manager-7f86f8796f-bkj4l" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.522723 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2jljb\" (UniqueName: \"kubernetes.io/projected/447fe2d4-cd26-4a3b-9cb0-5834991e70f4-kube-api-access-2jljb\") pod \"designate-operator-controller-manager-b45d7bf98-l5z5s\" (UID: \"447fe2d4-cd26-4a3b-9cb0-5834991e70f4\") " pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-l5z5s" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.522761 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hgzc7\" (UniqueName: \"kubernetes.io/projected/b37c27a9-7292-41a4-92bb-584e9b492aa0-kube-api-access-hgzc7\") pod \"cinder-operator-controller-manager-7478f7dbf9-8cwj6\" (UID: \"b37c27a9-7292-41a4-92bb-584e9b492aa0\") " pod="openstack-operators/cinder-operator-controller-manager-7478f7dbf9-8cwj6" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.547093 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-b45d7bf98-l5z5s"] Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.556646 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-594c8c9d5d-c2786"] Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.557593 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-c2786" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.562398 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-l6k2q" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.566783 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-78fdd796fd-jjnkn"] Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.567885 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-jjnkn" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.572454 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-tb2rm" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.573088 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-594c8c9d5d-c2786"] Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.588593 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-77d5c5b54f-t9v67"] Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.589988 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-t9v67" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.596824 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-qrmhk" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.600670 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-78fdd796fd-jjnkn"] Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.610341 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-694cf4f878-6ljdm"] Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.611561 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-694cf4f878-6ljdm" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.614247 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-mkvjx" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.620924 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.621255 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-77d5c5b54f-t9v67"] Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.625488 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-czf7z\" (UniqueName: \"kubernetes.io/projected/374004dc-56e6-4af1-9d53-0a14477e623f-kube-api-access-czf7z\") pod \"infra-operator-controller-manager-694cf4f878-6ljdm\" (UID: \"374004dc-56e6-4af1-9d53-0a14477e623f\") " pod="openstack-operators/infra-operator-controller-manager-694cf4f878-6ljdm" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.625523 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/374004dc-56e6-4af1-9d53-0a14477e623f-cert\") pod \"infra-operator-controller-manager-694cf4f878-6ljdm\" (UID: \"374004dc-56e6-4af1-9d53-0a14477e623f\") " pod="openstack-operators/infra-operator-controller-manager-694cf4f878-6ljdm" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.625554 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9p2ss\" (UniqueName: \"kubernetes.io/projected/e4eb0e9f-c220-4e50-9a7e-89da7fe708d9-kube-api-access-9p2ss\") pod \"barbican-operator-controller-manager-7f86f8796f-bkj4l\" (UID: \"e4eb0e9f-c220-4e50-9a7e-89da7fe708d9\") " pod="openstack-operators/barbican-operator-controller-manager-7f86f8796f-bkj4l" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.625581 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2jljb\" (UniqueName: \"kubernetes.io/projected/447fe2d4-cd26-4a3b-9cb0-5834991e70f4-kube-api-access-2jljb\") pod \"designate-operator-controller-manager-b45d7bf98-l5z5s\" (UID: \"447fe2d4-cd26-4a3b-9cb0-5834991e70f4\") " pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-l5z5s" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.625614 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fvmlk\" (UniqueName: \"kubernetes.io/projected/796eb6f5-8010-4397-9328-de3b605107be-kube-api-access-fvmlk\") pod \"horizon-operator-controller-manager-77d5c5b54f-t9v67\" (UID: \"796eb6f5-8010-4397-9328-de3b605107be\") " pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-t9v67" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.625635 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hgzc7\" (UniqueName: \"kubernetes.io/projected/b37c27a9-7292-41a4-92bb-584e9b492aa0-kube-api-access-hgzc7\") pod \"cinder-operator-controller-manager-7478f7dbf9-8cwj6\" (UID: \"b37c27a9-7292-41a4-92bb-584e9b492aa0\") " pod="openstack-operators/cinder-operator-controller-manager-7478f7dbf9-8cwj6" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.625665 4865 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4k8d5\" (UniqueName: \"kubernetes.io/projected/eababfca-3249-49df-b6d4-364e669e4b1e-kube-api-access-4k8d5\") pod \"glance-operator-controller-manager-78fdd796fd-jjnkn\" (UID: \"eababfca-3249-49df-b6d4-364e669e4b1e\") " pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-jjnkn" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.625686 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwhgg\" (UniqueName: \"kubernetes.io/projected/fb50fe13-a8bb-4115-85ab-677105257e40-kube-api-access-zwhgg\") pod \"heat-operator-controller-manager-594c8c9d5d-c2786\" (UID: \"fb50fe13-a8bb-4115-85ab-677105257e40\") " pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-c2786" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.638489 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-694cf4f878-6ljdm"] Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.645825 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-598f7747c9-kblgz"] Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.646725 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-598f7747c9-kblgz" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.649562 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-bkvvp" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.658739 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-598f7747c9-kblgz"] Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.668854 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hgzc7\" (UniqueName: \"kubernetes.io/projected/b37c27a9-7292-41a4-92bb-584e9b492aa0-kube-api-access-hgzc7\") pod \"cinder-operator-controller-manager-7478f7dbf9-8cwj6\" (UID: \"b37c27a9-7292-41a4-92bb-584e9b492aa0\") " pod="openstack-operators/cinder-operator-controller-manager-7478f7dbf9-8cwj6" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.677975 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2jljb\" (UniqueName: \"kubernetes.io/projected/447fe2d4-cd26-4a3b-9cb0-5834991e70f4-kube-api-access-2jljb\") pod \"designate-operator-controller-manager-b45d7bf98-l5z5s\" (UID: \"447fe2d4-cd26-4a3b-9cb0-5834991e70f4\") " pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-l5z5s" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.678107 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-b8b6d4659-xgdcd"] Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.679137 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-xgdcd" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.680297 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9p2ss\" (UniqueName: \"kubernetes.io/projected/e4eb0e9f-c220-4e50-9a7e-89da7fe708d9-kube-api-access-9p2ss\") pod \"barbican-operator-controller-manager-7f86f8796f-bkj4l\" (UID: \"e4eb0e9f-c220-4e50-9a7e-89da7fe708d9\") " pod="openstack-operators/barbican-operator-controller-manager-7f86f8796f-bkj4l" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.684192 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-b8b6d4659-xgdcd"] Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.689421 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-f4grk" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.726705 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6lqrk\" (UniqueName: \"kubernetes.io/projected/1f630a72-d9f3-4af8-9b61-11b0551ad19f-kube-api-access-6lqrk\") pod \"ironic-operator-controller-manager-598f7747c9-kblgz\" (UID: \"1f630a72-d9f3-4af8-9b61-11b0551ad19f\") " pod="openstack-operators/ironic-operator-controller-manager-598f7747c9-kblgz" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.727090 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fvmlk\" (UniqueName: \"kubernetes.io/projected/796eb6f5-8010-4397-9328-de3b605107be-kube-api-access-fvmlk\") pod \"horizon-operator-controller-manager-77d5c5b54f-t9v67\" (UID: \"796eb6f5-8010-4397-9328-de3b605107be\") " pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-t9v67" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.727144 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4k8d5\" (UniqueName: \"kubernetes.io/projected/eababfca-3249-49df-b6d4-364e669e4b1e-kube-api-access-4k8d5\") pod \"glance-operator-controller-manager-78fdd796fd-jjnkn\" (UID: \"eababfca-3249-49df-b6d4-364e669e4b1e\") " pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-jjnkn" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.727176 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwhgg\" (UniqueName: \"kubernetes.io/projected/fb50fe13-a8bb-4115-85ab-677105257e40-kube-api-access-zwhgg\") pod \"heat-operator-controller-manager-594c8c9d5d-c2786\" (UID: \"fb50fe13-a8bb-4115-85ab-677105257e40\") " pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-c2786" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.727303 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2z599\" (UniqueName: \"kubernetes.io/projected/de67db31-b314-42ed-ac57-68f9fc5aab7c-kube-api-access-2z599\") pod \"keystone-operator-controller-manager-b8b6d4659-xgdcd\" (UID: \"de67db31-b314-42ed-ac57-68f9fc5aab7c\") " pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-xgdcd" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.727355 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-czf7z\" (UniqueName: 
\"kubernetes.io/projected/374004dc-56e6-4af1-9d53-0a14477e623f-kube-api-access-czf7z\") pod \"infra-operator-controller-manager-694cf4f878-6ljdm\" (UID: \"374004dc-56e6-4af1-9d53-0a14477e623f\") " pod="openstack-operators/infra-operator-controller-manager-694cf4f878-6ljdm" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.727446 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/374004dc-56e6-4af1-9d53-0a14477e623f-cert\") pod \"infra-operator-controller-manager-694cf4f878-6ljdm\" (UID: \"374004dc-56e6-4af1-9d53-0a14477e623f\") " pod="openstack-operators/infra-operator-controller-manager-694cf4f878-6ljdm" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.729653 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-78c6999f6f-jdbmf"] Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.738707 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-jdbmf" Jan 26 17:13:27 crc kubenswrapper[4865]: E0126 17:13:27.756788 4865 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 26 17:13:27 crc kubenswrapper[4865]: E0126 17:13:27.756912 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/374004dc-56e6-4af1-9d53-0a14477e623f-cert podName:374004dc-56e6-4af1-9d53-0a14477e623f nodeName:}" failed. No retries permitted until 2026-01-26 17:13:28.256867995 +0000 UTC m=+1135.840753582 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/374004dc-56e6-4af1-9d53-0a14477e623f-cert") pod "infra-operator-controller-manager-694cf4f878-6ljdm" (UID: "374004dc-56e6-4af1-9d53-0a14477e623f") : secret "infra-operator-webhook-server-cert" not found Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.775456 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-6b9fb5fdcb-d2544"] Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.776488 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-8ml42" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.778096 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-6b9fb5fdcb-d2544" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.787153 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-7jbt7" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.789292 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fvmlk\" (UniqueName: \"kubernetes.io/projected/796eb6f5-8010-4397-9328-de3b605107be-kube-api-access-fvmlk\") pod \"horizon-operator-controller-manager-77d5c5b54f-t9v67\" (UID: \"796eb6f5-8010-4397-9328-de3b605107be\") " pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-t9v67" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.795417 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-czf7z\" (UniqueName: \"kubernetes.io/projected/374004dc-56e6-4af1-9d53-0a14477e623f-kube-api-access-czf7z\") pod \"infra-operator-controller-manager-694cf4f878-6ljdm\" (UID: \"374004dc-56e6-4af1-9d53-0a14477e623f\") " pod="openstack-operators/infra-operator-controller-manager-694cf4f878-6ljdm" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.820668 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-7478f7dbf9-8cwj6" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.823201 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7f86f8796f-bkj4l" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.829883 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zwhgg\" (UniqueName: \"kubernetes.io/projected/fb50fe13-a8bb-4115-85ab-677105257e40-kube-api-access-zwhgg\") pod \"heat-operator-controller-manager-594c8c9d5d-c2786\" (UID: \"fb50fe13-a8bb-4115-85ab-677105257e40\") " pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-c2786" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.836431 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4k8d5\" (UniqueName: \"kubernetes.io/projected/eababfca-3249-49df-b6d4-364e669e4b1e-kube-api-access-4k8d5\") pod \"glance-operator-controller-manager-78fdd796fd-jjnkn\" (UID: \"eababfca-3249-49df-b6d4-364e669e4b1e\") " pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-jjnkn" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.836785 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-l5z5s" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.853077 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-6b9fb5fdcb-d2544"] Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.864424 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6hnqp\" (UniqueName: \"kubernetes.io/projected/40656d26-d2a5-4728-a67c-1880fb430675-kube-api-access-6hnqp\") pod \"mariadb-operator-controller-manager-6b9fb5fdcb-d2544\" (UID: \"40656d26-d2a5-4728-a67c-1880fb430675\") " pod="openstack-operators/mariadb-operator-controller-manager-6b9fb5fdcb-d2544" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.864482 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6lqrk\" (UniqueName: \"kubernetes.io/projected/1f630a72-d9f3-4af8-9b61-11b0551ad19f-kube-api-access-6lqrk\") pod \"ironic-operator-controller-manager-598f7747c9-kblgz\" (UID: \"1f630a72-d9f3-4af8-9b61-11b0551ad19f\") " pod="openstack-operators/ironic-operator-controller-manager-598f7747c9-kblgz" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.864511 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xz7rg\" (UniqueName: \"kubernetes.io/projected/9532dd1b-a2e9-4e25-b6ae-55f9d9e42859-kube-api-access-xz7rg\") pod \"manila-operator-controller-manager-78c6999f6f-jdbmf\" (UID: \"9532dd1b-a2e9-4e25-b6ae-55f9d9e42859\") " pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-jdbmf" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.864562 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2z599\" (UniqueName: \"kubernetes.io/projected/de67db31-b314-42ed-ac57-68f9fc5aab7c-kube-api-access-2z599\") pod \"keystone-operator-controller-manager-b8b6d4659-xgdcd\" (UID: \"de67db31-b314-42ed-ac57-68f9fc5aab7c\") " pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-xgdcd" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.872065 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-7fff5bf694-l9xql"] Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.873303 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-7fff5bf694-l9xql" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.878970 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-m6hhk" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.885362 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-c2786" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.896447 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-78c6999f6f-jdbmf"] Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.900422 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-jjnkn" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.911202 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6lqrk\" (UniqueName: \"kubernetes.io/projected/1f630a72-d9f3-4af8-9b61-11b0551ad19f-kube-api-access-6lqrk\") pod \"ironic-operator-controller-manager-598f7747c9-kblgz\" (UID: \"1f630a72-d9f3-4af8-9b61-11b0551ad19f\") " pod="openstack-operators/ironic-operator-controller-manager-598f7747c9-kblgz" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.921838 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-t9v67" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.931725 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2z599\" (UniqueName: \"kubernetes.io/projected/de67db31-b314-42ed-ac57-68f9fc5aab7c-kube-api-access-2z599\") pod \"keystone-operator-controller-manager-b8b6d4659-xgdcd\" (UID: \"de67db31-b314-42ed-ac57-68f9fc5aab7c\") " pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-xgdcd" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.939884 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-5f4cd88d46-7c4wx"] Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.941651 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-5f4cd88d46-7c4wx" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.944095 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-j79qc" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.966468 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6hnqp\" (UniqueName: \"kubernetes.io/projected/40656d26-d2a5-4728-a67c-1880fb430675-kube-api-access-6hnqp\") pod \"mariadb-operator-controller-manager-6b9fb5fdcb-d2544\" (UID: \"40656d26-d2a5-4728-a67c-1880fb430675\") " pod="openstack-operators/mariadb-operator-controller-manager-6b9fb5fdcb-d2544" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.966547 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xz7rg\" (UniqueName: \"kubernetes.io/projected/9532dd1b-a2e9-4e25-b6ae-55f9d9e42859-kube-api-access-xz7rg\") pod \"manila-operator-controller-manager-78c6999f6f-jdbmf\" (UID: \"9532dd1b-a2e9-4e25-b6ae-55f9d9e42859\") " pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-jdbmf" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.966601 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qhxgl\" (UniqueName: \"kubernetes.io/projected/6fe7b69f-8c3c-413a-b838-9abd3708a60c-kube-api-access-qhxgl\") pod \"nova-operator-controller-manager-7fff5bf694-l9xql\" (UID: \"6fe7b69f-8c3c-413a-b838-9abd3708a60c\") " pod="openstack-operators/nova-operator-controller-manager-7fff5bf694-l9xql" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.979928 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-78d58447c5-psb52"] Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.985308 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-78d58447c5-psb52" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.987884 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-br7x8" Jan 26 17:13:27 crc kubenswrapper[4865]: I0126 17:13:27.994132 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6hnqp\" (UniqueName: \"kubernetes.io/projected/40656d26-d2a5-4728-a67c-1880fb430675-kube-api-access-6hnqp\") pod \"mariadb-operator-controller-manager-6b9fb5fdcb-d2544\" (UID: \"40656d26-d2a5-4728-a67c-1880fb430675\") " pod="openstack-operators/mariadb-operator-controller-manager-6b9fb5fdcb-d2544" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.010390 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-7fff5bf694-l9xql"] Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.011241 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xz7rg\" (UniqueName: \"kubernetes.io/projected/9532dd1b-a2e9-4e25-b6ae-55f9d9e42859-kube-api-access-xz7rg\") pod \"manila-operator-controller-manager-78c6999f6f-jdbmf\" (UID: \"9532dd1b-a2e9-4e25-b6ae-55f9d9e42859\") " pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-jdbmf" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.026056 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-78d58447c5-psb52"] Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.036402 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-598f7747c9-kblgz" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.036915 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-5f4cd88d46-7c4wx"] Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.067949 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-6f75f45d54-zb94x"] Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.068951 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-6f75f45d54-zb94x" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.072180 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tc2zh\" (UniqueName: \"kubernetes.io/projected/e65ea7be-88a0-4d65-ac76-b1956c034abd-kube-api-access-tc2zh\") pod \"neutron-operator-controller-manager-78d58447c5-psb52\" (UID: \"e65ea7be-88a0-4d65-ac76-b1956c034abd\") " pod="openstack-operators/neutron-operator-controller-manager-78d58447c5-psb52" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.072260 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5hqdt\" (UniqueName: \"kubernetes.io/projected/d0457508-04e0-4b3e-a84a-097a47ee346e-kube-api-access-5hqdt\") pod \"octavia-operator-controller-manager-5f4cd88d46-7c4wx\" (UID: \"d0457508-04e0-4b3e-a84a-097a47ee346e\") " pod="openstack-operators/octavia-operator-controller-manager-5f4cd88d46-7c4wx" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.072365 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qhxgl\" (UniqueName: \"kubernetes.io/projected/6fe7b69f-8c3c-413a-b838-9abd3708a60c-kube-api-access-qhxgl\") pod \"nova-operator-controller-manager-7fff5bf694-l9xql\" (UID: \"6fe7b69f-8c3c-413a-b838-9abd3708a60c\") " pod="openstack-operators/nova-operator-controller-manager-7fff5bf694-l9xql" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.080069 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-ltbsv" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.095543 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854hhdtm"] Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.097822 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854hhdtm" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.103835 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qhxgl\" (UniqueName: \"kubernetes.io/projected/6fe7b69f-8c3c-413a-b838-9abd3708a60c-kube-api-access-qhxgl\") pod \"nova-operator-controller-manager-7fff5bf694-l9xql\" (UID: \"6fe7b69f-8c3c-413a-b838-9abd3708a60c\") " pod="openstack-operators/nova-operator-controller-manager-7fff5bf694-l9xql" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.105274 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-h2z45" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.105711 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.119500 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-79d5ccc684-v2ff5"] Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.132648 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-79d5ccc684-v2ff5" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.140275 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-bf5cs" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.145333 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-79d5ccc684-v2ff5"] Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.161810 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854hhdtm"] Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.173903 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4444365f-5137-411d-ba00-25c9f9a7390f-cert\") pod \"openstack-baremetal-operator-controller-manager-6b68b8b854hhdtm\" (UID: \"4444365f-5137-411d-ba00-25c9f9a7390f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854hhdtm" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.174038 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7dmtp\" (UniqueName: \"kubernetes.io/projected/f7123b2b-df5a-4ae2-bfe6-f067fc318b5b-kube-api-access-7dmtp\") pod \"ovn-operator-controller-manager-6f75f45d54-zb94x\" (UID: \"f7123b2b-df5a-4ae2-bfe6-f067fc318b5b\") " pod="openstack-operators/ovn-operator-controller-manager-6f75f45d54-zb94x" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.174089 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jz52w\" (UniqueName: \"kubernetes.io/projected/4444365f-5137-411d-ba00-25c9f9a7390f-kube-api-access-jz52w\") pod \"openstack-baremetal-operator-controller-manager-6b68b8b854hhdtm\" (UID: \"4444365f-5137-411d-ba00-25c9f9a7390f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854hhdtm" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.174157 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tc2zh\" (UniqueName: \"kubernetes.io/projected/e65ea7be-88a0-4d65-ac76-b1956c034abd-kube-api-access-tc2zh\") pod \"neutron-operator-controller-manager-78d58447c5-psb52\" (UID: \"e65ea7be-88a0-4d65-ac76-b1956c034abd\") " pod="openstack-operators/neutron-operator-controller-manager-78d58447c5-psb52" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.174204 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5hqdt\" (UniqueName: \"kubernetes.io/projected/d0457508-04e0-4b3e-a84a-097a47ee346e-kube-api-access-5hqdt\") pod \"octavia-operator-controller-manager-5f4cd88d46-7c4wx\" (UID: \"d0457508-04e0-4b3e-a84a-097a47ee346e\") " pod="openstack-operators/octavia-operator-controller-manager-5f4cd88d46-7c4wx" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.174252 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4mrl4\" (UniqueName: \"kubernetes.io/projected/c0022f58-7356-41ad-a2da-362c9f9bfd73-kube-api-access-4mrl4\") pod \"placement-operator-controller-manager-79d5ccc684-v2ff5\" (UID: \"c0022f58-7356-41ad-a2da-362c9f9bfd73\") " 
pod="openstack-operators/placement-operator-controller-manager-79d5ccc684-v2ff5" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.202380 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-xgdcd" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.234111 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-jdbmf" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.245738 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-6b9fb5fdcb-d2544" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.275933 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-547cbdb99f-lb26q"] Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.276957 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-lb26q" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.278234 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jz52w\" (UniqueName: \"kubernetes.io/projected/4444365f-5137-411d-ba00-25c9f9a7390f-kube-api-access-jz52w\") pod \"openstack-baremetal-operator-controller-manager-6b68b8b854hhdtm\" (UID: \"4444365f-5137-411d-ba00-25c9f9a7390f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854hhdtm" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.278293 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4mrl4\" (UniqueName: \"kubernetes.io/projected/c0022f58-7356-41ad-a2da-362c9f9bfd73-kube-api-access-4mrl4\") pod \"placement-operator-controller-manager-79d5ccc684-v2ff5\" (UID: \"c0022f58-7356-41ad-a2da-362c9f9bfd73\") " pod="openstack-operators/placement-operator-controller-manager-79d5ccc684-v2ff5" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.278338 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4444365f-5137-411d-ba00-25c9f9a7390f-cert\") pod \"openstack-baremetal-operator-controller-manager-6b68b8b854hhdtm\" (UID: \"4444365f-5137-411d-ba00-25c9f9a7390f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854hhdtm" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.278390 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/374004dc-56e6-4af1-9d53-0a14477e623f-cert\") pod \"infra-operator-controller-manager-694cf4f878-6ljdm\" (UID: \"374004dc-56e6-4af1-9d53-0a14477e623f\") " pod="openstack-operators/infra-operator-controller-manager-694cf4f878-6ljdm" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.278406 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7dmtp\" (UniqueName: \"kubernetes.io/projected/f7123b2b-df5a-4ae2-bfe6-f067fc318b5b-kube-api-access-7dmtp\") pod \"ovn-operator-controller-manager-6f75f45d54-zb94x\" (UID: \"f7123b2b-df5a-4ae2-bfe6-f067fc318b5b\") " pod="openstack-operators/ovn-operator-controller-manager-6f75f45d54-zb94x" Jan 26 17:13:28 crc kubenswrapper[4865]: E0126 17:13:28.278843 4865 secret.go:188] Couldn't get secret 
openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 26 17:13:28 crc kubenswrapper[4865]: E0126 17:13:28.278881 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4444365f-5137-411d-ba00-25c9f9a7390f-cert podName:4444365f-5137-411d-ba00-25c9f9a7390f nodeName:}" failed. No retries permitted until 2026-01-26 17:13:28.778868078 +0000 UTC m=+1136.362753665 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/4444365f-5137-411d-ba00-25c9f9a7390f-cert") pod "openstack-baremetal-operator-controller-manager-6b68b8b854hhdtm" (UID: "4444365f-5137-411d-ba00-25c9f9a7390f") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 26 17:13:28 crc kubenswrapper[4865]: E0126 17:13:28.279025 4865 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 26 17:13:28 crc kubenswrapper[4865]: E0126 17:13:28.279048 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/374004dc-56e6-4af1-9d53-0a14477e623f-cert podName:374004dc-56e6-4af1-9d53-0a14477e623f nodeName:}" failed. No retries permitted until 2026-01-26 17:13:29.279039762 +0000 UTC m=+1136.862925349 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/374004dc-56e6-4af1-9d53-0a14477e623f-cert") pod "infra-operator-controller-manager-694cf4f878-6ljdm" (UID: "374004dc-56e6-4af1-9d53-0a14477e623f") : secret "infra-operator-webhook-server-cert" not found Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.280667 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-7fff5bf694-l9xql" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.288641 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-6f75f45d54-zb94x"] Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.312369 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-bqj78" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.337134 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-85cd9769bb-gwrzm"] Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.338455 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-gwrzm" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.343856 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5hqdt\" (UniqueName: \"kubernetes.io/projected/d0457508-04e0-4b3e-a84a-097a47ee346e-kube-api-access-5hqdt\") pod \"octavia-operator-controller-manager-5f4cd88d46-7c4wx\" (UID: \"d0457508-04e0-4b3e-a84a-097a47ee346e\") " pod="openstack-operators/octavia-operator-controller-manager-5f4cd88d46-7c4wx" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.345715 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tc2zh\" (UniqueName: \"kubernetes.io/projected/e65ea7be-88a0-4d65-ac76-b1956c034abd-kube-api-access-tc2zh\") pod \"neutron-operator-controller-manager-78d58447c5-psb52\" (UID: \"e65ea7be-88a0-4d65-ac76-b1956c034abd\") " pod="openstack-operators/neutron-operator-controller-manager-78d58447c5-psb52" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.348792 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-547cbdb99f-lb26q"] Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.355093 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-85cd9769bb-gwrzm"] Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.381397 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-zxgqm" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.381832 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hdd86\" (UniqueName: \"kubernetes.io/projected/00f471fe-001b-40e6-91be-92a8f71bc951-kube-api-access-hdd86\") pod \"swift-operator-controller-manager-547cbdb99f-lb26q\" (UID: \"00f471fe-001b-40e6-91be-92a8f71bc951\") " pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-lb26q" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.381935 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pl46g\" (UniqueName: \"kubernetes.io/projected/e9f2346a-220f-4638-8faa-c2eba1914417-kube-api-access-pl46g\") pod \"telemetry-operator-controller-manager-85cd9769bb-gwrzm\" (UID: \"e9f2346a-220f-4638-8faa-c2eba1914417\") " pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-gwrzm" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.396227 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jz52w\" (UniqueName: \"kubernetes.io/projected/4444365f-5137-411d-ba00-25c9f9a7390f-kube-api-access-jz52w\") pod \"openstack-baremetal-operator-controller-manager-6b68b8b854hhdtm\" (UID: \"4444365f-5137-411d-ba00-25c9f9a7390f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854hhdtm" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.400715 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4mrl4\" (UniqueName: \"kubernetes.io/projected/c0022f58-7356-41ad-a2da-362c9f9bfd73-kube-api-access-4mrl4\") pod \"placement-operator-controller-manager-79d5ccc684-v2ff5\" (UID: \"c0022f58-7356-41ad-a2da-362c9f9bfd73\") " pod="openstack-operators/placement-operator-controller-manager-79d5ccc684-v2ff5" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 
17:13:28.456742 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-69797bbcbd-pk5pz"] Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.458077 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-pk5pz" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.464729 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-69797bbcbd-pk5pz"] Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.465406 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-k4d4j" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.465468 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-79d5ccc684-v2ff5" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.476390 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7dmtp\" (UniqueName: \"kubernetes.io/projected/f7123b2b-df5a-4ae2-bfe6-f067fc318b5b-kube-api-access-7dmtp\") pod \"ovn-operator-controller-manager-6f75f45d54-zb94x\" (UID: \"f7123b2b-df5a-4ae2-bfe6-f067fc318b5b\") " pod="openstack-operators/ovn-operator-controller-manager-6f75f45d54-zb94x" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.483219 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hdd86\" (UniqueName: \"kubernetes.io/projected/00f471fe-001b-40e6-91be-92a8f71bc951-kube-api-access-hdd86\") pod \"swift-operator-controller-manager-547cbdb99f-lb26q\" (UID: \"00f471fe-001b-40e6-91be-92a8f71bc951\") " pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-lb26q" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.483300 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pl46g\" (UniqueName: \"kubernetes.io/projected/e9f2346a-220f-4638-8faa-c2eba1914417-kube-api-access-pl46g\") pod \"telemetry-operator-controller-manager-85cd9769bb-gwrzm\" (UID: \"e9f2346a-220f-4638-8faa-c2eba1914417\") " pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-gwrzm" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.591210 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pl46g\" (UniqueName: \"kubernetes.io/projected/e9f2346a-220f-4638-8faa-c2eba1914417-kube-api-access-pl46g\") pod \"telemetry-operator-controller-manager-85cd9769bb-gwrzm\" (UID: \"e9f2346a-220f-4638-8faa-c2eba1914417\") " pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-gwrzm" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.592408 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l2xnf\" (UniqueName: \"kubernetes.io/projected/fcf22a02-e91a-43a4-9e2b-738373b94769-kube-api-access-l2xnf\") pod \"test-operator-controller-manager-69797bbcbd-pk5pz\" (UID: \"fcf22a02-e91a-43a4-9e2b-738373b94769\") " pod="openstack-operators/test-operator-controller-manager-69797bbcbd-pk5pz" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.592753 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-564965969-pksm4"] Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.594188 4865 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-564965969-pksm4" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.602101 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-5f4cd88d46-7c4wx" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.774720 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-564965969-pksm4"] Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.776896 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-6f75f45d54-zb94x" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.813052 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zrgdv\" (UniqueName: \"kubernetes.io/projected/68710299-edf4-4c26-b035-a33e03f89d5f-kube-api-access-zrgdv\") pod \"watcher-operator-controller-manager-564965969-pksm4\" (UID: \"68710299-edf4-4c26-b035-a33e03f89d5f\") " pod="openstack-operators/watcher-operator-controller-manager-564965969-pksm4" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.813174 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l2xnf\" (UniqueName: \"kubernetes.io/projected/fcf22a02-e91a-43a4-9e2b-738373b94769-kube-api-access-l2xnf\") pod \"test-operator-controller-manager-69797bbcbd-pk5pz\" (UID: \"fcf22a02-e91a-43a4-9e2b-738373b94769\") " pod="openstack-operators/test-operator-controller-manager-69797bbcbd-pk5pz" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.813228 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4444365f-5137-411d-ba00-25c9f9a7390f-cert\") pod \"openstack-baremetal-operator-controller-manager-6b68b8b854hhdtm\" (UID: \"4444365f-5137-411d-ba00-25c9f9a7390f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854hhdtm" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.813533 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-78d58447c5-psb52" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.843970 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-xn894" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.849749 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-gwrzm" Jan 26 17:13:28 crc kubenswrapper[4865]: E0126 17:13:28.857127 4865 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 26 17:13:28 crc kubenswrapper[4865]: E0126 17:13:28.857337 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4444365f-5137-411d-ba00-25c9f9a7390f-cert podName:4444365f-5137-411d-ba00-25c9f9a7390f nodeName:}" failed. No retries permitted until 2026-01-26 17:13:29.857212561 +0000 UTC m=+1137.441098148 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/4444365f-5137-411d-ba00-25c9f9a7390f-cert") pod "openstack-baremetal-operator-controller-manager-6b68b8b854hhdtm" (UID: "4444365f-5137-411d-ba00-25c9f9a7390f") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.883852 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hdd86\" (UniqueName: \"kubernetes.io/projected/00f471fe-001b-40e6-91be-92a8f71bc951-kube-api-access-hdd86\") pod \"swift-operator-controller-manager-547cbdb99f-lb26q\" (UID: \"00f471fe-001b-40e6-91be-92a8f71bc951\") " pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-lb26q" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.917182 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zrgdv\" (UniqueName: \"kubernetes.io/projected/68710299-edf4-4c26-b035-a33e03f89d5f-kube-api-access-zrgdv\") pod \"watcher-operator-controller-manager-564965969-pksm4\" (UID: \"68710299-edf4-4c26-b035-a33e03f89d5f\") " pod="openstack-operators/watcher-operator-controller-manager-564965969-pksm4" Jan 26 17:13:28 crc kubenswrapper[4865]: I0126 17:13:28.921330 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-lb26q" Jan 26 17:13:29 crc kubenswrapper[4865]: I0126 17:13:29.009881 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7fc674d489-dq2vn"] Jan 26 17:13:29 crc kubenswrapper[4865]: I0126 17:13:29.098065 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-7fc674d489-dq2vn" Jan 26 17:13:29 crc kubenswrapper[4865]: I0126 17:13:29.379866 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ab8ab562-a119-48c2-b431-abe25a789d74-metrics-certs\") pod \"openstack-operator-controller-manager-7fc674d489-dq2vn\" (UID: \"ab8ab562-a119-48c2-b431-abe25a789d74\") " pod="openstack-operators/openstack-operator-controller-manager-7fc674d489-dq2vn" Jan 26 17:13:29 crc kubenswrapper[4865]: I0126 17:13:29.379973 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/374004dc-56e6-4af1-9d53-0a14477e623f-cert\") pod \"infra-operator-controller-manager-694cf4f878-6ljdm\" (UID: \"374004dc-56e6-4af1-9d53-0a14477e623f\") " pod="openstack-operators/infra-operator-controller-manager-694cf4f878-6ljdm" Jan 26 17:13:29 crc kubenswrapper[4865]: I0126 17:13:29.380027 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ab8ab562-a119-48c2-b431-abe25a789d74-webhook-certs\") pod \"openstack-operator-controller-manager-7fc674d489-dq2vn\" (UID: \"ab8ab562-a119-48c2-b431-abe25a789d74\") " pod="openstack-operators/openstack-operator-controller-manager-7fc674d489-dq2vn" Jan 26 17:13:29 crc kubenswrapper[4865]: I0126 17:13:29.380092 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-559jc\" (UniqueName: \"kubernetes.io/projected/ab8ab562-a119-48c2-b431-abe25a789d74-kube-api-access-559jc\") pod 
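The nestedpendingoperations entries above show the kubelet's per-volume retry window doubling on each failed MountVolume.SetUp (500ms, 1s, 2s, 4s in this log). A minimal Go sketch of that doubling follows; the initial and maximum delays are assumptions chosen to match the delays visible here, not values read from kubelet source.

package main

import (
	"fmt"
	"time"
)

// Assumed constants matching the delays seen in the log above.
const (
	initialDelay = 500 * time.Millisecond
	maxDelay     = 2 * time.Minute
)

// nextDelay doubles the previous retry delay, capped at maxDelay.
func nextDelay(last time.Duration) time.Duration {
	if last == 0 {
		return initialDelay
	}
	d := 2 * last
	if d > maxDelay {
		return maxDelay
	}
	return d
}

func main() {
	var d time.Duration
	for i := 1; i <= 5; i++ {
		d = nextDelay(d)
		fmt.Printf("retry %d: no retries permitted for %v\n", i, d)
	}
}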
\"openstack-operator-controller-manager-7fc674d489-dq2vn\" (UID: \"ab8ab562-a119-48c2-b431-abe25a789d74\") " pod="openstack-operators/openstack-operator-controller-manager-7fc674d489-dq2vn" Jan 26 17:13:29 crc kubenswrapper[4865]: E0126 17:13:29.381735 4865 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 26 17:13:29 crc kubenswrapper[4865]: E0126 17:13:29.381795 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/374004dc-56e6-4af1-9d53-0a14477e623f-cert podName:374004dc-56e6-4af1-9d53-0a14477e623f nodeName:}" failed. No retries permitted until 2026-01-26 17:13:31.381775317 +0000 UTC m=+1138.965660904 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/374004dc-56e6-4af1-9d53-0a14477e623f-cert") pod "infra-operator-controller-manager-694cf4f878-6ljdm" (UID: "374004dc-56e6-4af1-9d53-0a14477e623f") : secret "infra-operator-webhook-server-cert" not found Jan 26 17:13:29 crc kubenswrapper[4865]: I0126 17:13:29.551228 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Jan 26 17:13:29 crc kubenswrapper[4865]: I0126 17:13:29.551796 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-jfvv6" Jan 26 17:13:29 crc kubenswrapper[4865]: I0126 17:13:29.551929 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Jan 26 17:13:29 crc kubenswrapper[4865]: I0126 17:13:29.556654 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ab8ab562-a119-48c2-b431-abe25a789d74-webhook-certs\") pod \"openstack-operator-controller-manager-7fc674d489-dq2vn\" (UID: \"ab8ab562-a119-48c2-b431-abe25a789d74\") " pod="openstack-operators/openstack-operator-controller-manager-7fc674d489-dq2vn" Jan 26 17:13:29 crc kubenswrapper[4865]: I0126 17:13:29.560064 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-559jc\" (UniqueName: \"kubernetes.io/projected/ab8ab562-a119-48c2-b431-abe25a789d74-kube-api-access-559jc\") pod \"openstack-operator-controller-manager-7fc674d489-dq2vn\" (UID: \"ab8ab562-a119-48c2-b431-abe25a789d74\") " pod="openstack-operators/openstack-operator-controller-manager-7fc674d489-dq2vn" Jan 26 17:13:29 crc kubenswrapper[4865]: I0126 17:13:29.560625 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ab8ab562-a119-48c2-b431-abe25a789d74-metrics-certs\") pod \"openstack-operator-controller-manager-7fc674d489-dq2vn\" (UID: \"ab8ab562-a119-48c2-b431-abe25a789d74\") " pod="openstack-operators/openstack-operator-controller-manager-7fc674d489-dq2vn" Jan 26 17:13:29 crc kubenswrapper[4865]: E0126 17:13:29.557892 4865 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 26 17:13:29 crc kubenswrapper[4865]: E0126 17:13:29.562814 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ab8ab562-a119-48c2-b431-abe25a789d74-webhook-certs podName:ab8ab562-a119-48c2-b431-abe25a789d74 nodeName:}" failed. No retries permitted until 2026-01-26 17:13:30.06279123 +0000 UTC m=+1137.646676817 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/ab8ab562-a119-48c2-b431-abe25a789d74-webhook-certs") pod "openstack-operator-controller-manager-7fc674d489-dq2vn" (UID: "ab8ab562-a119-48c2-b431-abe25a789d74") : secret "webhook-server-cert" not found Jan 26 17:13:29 crc kubenswrapper[4865]: E0126 17:13:29.562622 4865 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 26 17:13:29 crc kubenswrapper[4865]: E0126 17:13:29.563468 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ab8ab562-a119-48c2-b431-abe25a789d74-metrics-certs podName:ab8ab562-a119-48c2-b431-abe25a789d74 nodeName:}" failed. No retries permitted until 2026-01-26 17:13:30.063454119 +0000 UTC m=+1137.647339706 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ab8ab562-a119-48c2-b431-abe25a789d74-metrics-certs") pod "openstack-operator-controller-manager-7fc674d489-dq2vn" (UID: "ab8ab562-a119-48c2-b431-abe25a789d74") : secret "metrics-server-cert" not found Jan 26 17:13:29 crc kubenswrapper[4865]: I0126 17:13:29.569437 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7fc674d489-dq2vn"] Jan 26 17:13:29 crc kubenswrapper[4865]: I0126 17:13:29.585268 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zrgdv\" (UniqueName: \"kubernetes.io/projected/68710299-edf4-4c26-b035-a33e03f89d5f-kube-api-access-zrgdv\") pod \"watcher-operator-controller-manager-564965969-pksm4\" (UID: \"68710299-edf4-4c26-b035-a33e03f89d5f\") " pod="openstack-operators/watcher-operator-controller-manager-564965969-pksm4" Jan 26 17:13:29 crc kubenswrapper[4865]: I0126 17:13:29.591126 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l2xnf\" (UniqueName: \"kubernetes.io/projected/fcf22a02-e91a-43a4-9e2b-738373b94769-kube-api-access-l2xnf\") pod \"test-operator-controller-manager-69797bbcbd-pk5pz\" (UID: \"fcf22a02-e91a-43a4-9e2b-738373b94769\") " pod="openstack-operators/test-operator-controller-manager-69797bbcbd-pk5pz" Jan 26 17:13:29 crc kubenswrapper[4865]: I0126 17:13:29.705946 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-pk5pz" Jan 26 17:13:29 crc kubenswrapper[4865]: I0126 17:13:29.781524 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-559jc\" (UniqueName: \"kubernetes.io/projected/ab8ab562-a119-48c2-b431-abe25a789d74-kube-api-access-559jc\") pod \"openstack-operator-controller-manager-7fc674d489-dq2vn\" (UID: \"ab8ab562-a119-48c2-b431-abe25a789d74\") " pod="openstack-operators/openstack-operator-controller-manager-7fc674d489-dq2vn" Jan 26 17:13:29 crc kubenswrapper[4865]: I0126 17:13:29.800685 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-78fdd796fd-jjnkn"] Jan 26 17:13:29 crc kubenswrapper[4865]: I0126 17:13:29.809521 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-564965969-pksm4" Jan 26 17:13:29 crc kubenswrapper[4865]: I0126 17:13:29.844139 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-9f7sb"] Jan 26 17:13:29 crc kubenswrapper[4865]: I0126 17:13:29.845615 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-9f7sb" Jan 26 17:13:29 crc kubenswrapper[4865]: I0126 17:13:29.850881 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-ml9bh" Jan 26 17:13:29 crc kubenswrapper[4865]: I0126 17:13:29.861852 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-9f7sb"] Jan 26 17:13:29 crc kubenswrapper[4865]: I0126 17:13:29.867568 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4444365f-5137-411d-ba00-25c9f9a7390f-cert\") pod \"openstack-baremetal-operator-controller-manager-6b68b8b854hhdtm\" (UID: \"4444365f-5137-411d-ba00-25c9f9a7390f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854hhdtm" Jan 26 17:13:29 crc kubenswrapper[4865]: E0126 17:13:29.871234 4865 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 26 17:13:29 crc kubenswrapper[4865]: E0126 17:13:29.871369 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4444365f-5137-411d-ba00-25c9f9a7390f-cert podName:4444365f-5137-411d-ba00-25c9f9a7390f nodeName:}" failed. No retries permitted until 2026-01-26 17:13:31.871337638 +0000 UTC m=+1139.455223235 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/4444365f-5137-411d-ba00-25c9f9a7390f-cert") pod "openstack-baremetal-operator-controller-manager-6b68b8b854hhdtm" (UID: "4444365f-5137-411d-ba00-25c9f9a7390f") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 26 17:13:29 crc kubenswrapper[4865]: I0126 17:13:29.871252 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-29kf9\" (UniqueName: \"kubernetes.io/projected/dd2b4860-4a17-43e4-9909-afbe647519c4-kube-api-access-29kf9\") pod \"rabbitmq-cluster-operator-manager-668c99d594-9f7sb\" (UID: \"dd2b4860-4a17-43e4-9909-afbe647519c4\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-9f7sb" Jan 26 17:13:29 crc kubenswrapper[4865]: I0126 17:13:29.972895 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-29kf9\" (UniqueName: \"kubernetes.io/projected/dd2b4860-4a17-43e4-9909-afbe647519c4-kube-api-access-29kf9\") pod \"rabbitmq-cluster-operator-manager-668c99d594-9f7sb\" (UID: \"dd2b4860-4a17-43e4-9909-afbe647519c4\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-9f7sb" Jan 26 17:13:29 crc kubenswrapper[4865]: I0126 17:13:29.995143 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-29kf9\" (UniqueName: \"kubernetes.io/projected/dd2b4860-4a17-43e4-9909-afbe647519c4-kube-api-access-29kf9\") pod \"rabbitmq-cluster-operator-manager-668c99d594-9f7sb\" (UID: \"dd2b4860-4a17-43e4-9909-afbe647519c4\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-9f7sb" Jan 26 17:13:30 crc kubenswrapper[4865]: W0126 17:13:30.016674 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeababfca_3249_49df_b6d4_364e669e4b1e.slice/crio-a4a6286b87016ee1b72e066f32bc0a5edd762b354da71631770f9c59596fa1e6 WatchSource:0}: Error finding container a4a6286b87016ee1b72e066f32bc0a5edd762b354da71631770f9c59596fa1e6: Status 404 returned error can't find the container with id a4a6286b87016ee1b72e066f32bc0a5edd762b354da71631770f9c59596fa1e6 Jan 26 17:13:30 crc kubenswrapper[4865]: I0126 17:13:30.073469 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ab8ab562-a119-48c2-b431-abe25a789d74-metrics-certs\") pod \"openstack-operator-controller-manager-7fc674d489-dq2vn\" (UID: \"ab8ab562-a119-48c2-b431-abe25a789d74\") " pod="openstack-operators/openstack-operator-controller-manager-7fc674d489-dq2vn" Jan 26 17:13:30 crc kubenswrapper[4865]: I0126 17:13:30.074039 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ab8ab562-a119-48c2-b431-abe25a789d74-webhook-certs\") pod \"openstack-operator-controller-manager-7fc674d489-dq2vn\" (UID: \"ab8ab562-a119-48c2-b431-abe25a789d74\") " pod="openstack-operators/openstack-operator-controller-manager-7fc674d489-dq2vn" Jan 26 17:13:30 crc kubenswrapper[4865]: E0126 17:13:30.075375 4865 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 26 17:13:30 crc kubenswrapper[4865]: E0126 17:13:30.075574 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ab8ab562-a119-48c2-b431-abe25a789d74-metrics-certs podName:ab8ab562-a119-48c2-b431-abe25a789d74 
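Each of the secret.go:188 failures above blocks pod start until the referenced Secret exists in the openstack-operators namespace. A small client-go probe, sketched below under the assumption of a standard kubeconfig, can confirm which of the secrets named in this log are still missing:

package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Assumed kubeconfig location; adjust for the CRC node this log came from.
	config, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	client, err := kubernetes.NewForConfig(config)
	if err != nil {
		panic(err)
	}
	// Secrets the kubelet reports as "not found" in the entries above.
	for _, name := range []string{
		"openstack-baremetal-operator-webhook-server-cert",
		"infra-operator-webhook-server-cert",
		"webhook-server-cert",
		"metrics-server-cert",
	} {
		_, err := client.CoreV1().Secrets("openstack-operators").Get(context.Background(), name, metav1.GetOptions{})
		fmt.Printf("%s: err=%v\n", name, err)
	}
}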
nodeName:}" failed. No retries permitted until 2026-01-26 17:13:31.07554616 +0000 UTC m=+1138.659431747 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ab8ab562-a119-48c2-b431-abe25a789d74-metrics-certs") pod "openstack-operator-controller-manager-7fc674d489-dq2vn" (UID: "ab8ab562-a119-48c2-b431-abe25a789d74") : secret "metrics-server-cert" not found Jan 26 17:13:30 crc kubenswrapper[4865]: E0126 17:13:30.076383 4865 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 26 17:13:30 crc kubenswrapper[4865]: E0126 17:13:30.076579 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ab8ab562-a119-48c2-b431-abe25a789d74-webhook-certs podName:ab8ab562-a119-48c2-b431-abe25a789d74 nodeName:}" failed. No retries permitted until 2026-01-26 17:13:31.076562899 +0000 UTC m=+1138.660448486 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/ab8ab562-a119-48c2-b431-abe25a789d74-webhook-certs") pod "openstack-operator-controller-manager-7fc674d489-dq2vn" (UID: "ab8ab562-a119-48c2-b431-abe25a789d74") : secret "webhook-server-cert" not found Jan 26 17:13:30 crc kubenswrapper[4865]: I0126 17:13:30.176597 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-9f7sb" Jan 26 17:13:30 crc kubenswrapper[4865]: I0126 17:13:30.522778 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7f86f8796f-bkj4l"] Jan 26 17:13:30 crc kubenswrapper[4865]: I0126 17:13:30.537649 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-b45d7bf98-l5z5s"] Jan 26 17:13:30 crc kubenswrapper[4865]: I0126 17:13:30.644140 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7f86f8796f-bkj4l" event={"ID":"e4eb0e9f-c220-4e50-9a7e-89da7fe708d9","Type":"ContainerStarted","Data":"6989096eebf367e4f5a172825c84b00c51f80274efd66169f2a11722a1a56908"} Jan 26 17:13:30 crc kubenswrapper[4865]: I0126 17:13:30.645231 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-jjnkn" event={"ID":"eababfca-3249-49df-b6d4-364e669e4b1e","Type":"ContainerStarted","Data":"a4a6286b87016ee1b72e066f32bc0a5edd762b354da71631770f9c59596fa1e6"} Jan 26 17:13:31 crc kubenswrapper[4865]: I0126 17:13:31.079446 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ab8ab562-a119-48c2-b431-abe25a789d74-metrics-certs\") pod \"openstack-operator-controller-manager-7fc674d489-dq2vn\" (UID: \"ab8ab562-a119-48c2-b431-abe25a789d74\") " pod="openstack-operators/openstack-operator-controller-manager-7fc674d489-dq2vn" Jan 26 17:13:31 crc kubenswrapper[4865]: I0126 17:13:31.080751 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ab8ab562-a119-48c2-b431-abe25a789d74-webhook-certs\") pod \"openstack-operator-controller-manager-7fc674d489-dq2vn\" (UID: \"ab8ab562-a119-48c2-b431-abe25a789d74\") " pod="openstack-operators/openstack-operator-controller-manager-7fc674d489-dq2vn" Jan 26 17:13:31 crc kubenswrapper[4865]: E0126 17:13:31.080509 4865 
secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 26 17:13:31 crc kubenswrapper[4865]: E0126 17:13:31.081061 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ab8ab562-a119-48c2-b431-abe25a789d74-metrics-certs podName:ab8ab562-a119-48c2-b431-abe25a789d74 nodeName:}" failed. No retries permitted until 2026-01-26 17:13:33.081042022 +0000 UTC m=+1140.664927609 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ab8ab562-a119-48c2-b431-abe25a789d74-metrics-certs") pod "openstack-operator-controller-manager-7fc674d489-dq2vn" (UID: "ab8ab562-a119-48c2-b431-abe25a789d74") : secret "metrics-server-cert" not found Jan 26 17:13:31 crc kubenswrapper[4865]: E0126 17:13:31.080974 4865 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 26 17:13:31 crc kubenswrapper[4865]: E0126 17:13:31.081379 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ab8ab562-a119-48c2-b431-abe25a789d74-webhook-certs podName:ab8ab562-a119-48c2-b431-abe25a789d74 nodeName:}" failed. No retries permitted until 2026-01-26 17:13:33.081349001 +0000 UTC m=+1140.665234618 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/ab8ab562-a119-48c2-b431-abe25a789d74-webhook-certs") pod "openstack-operator-controller-manager-7fc674d489-dq2vn" (UID: "ab8ab562-a119-48c2-b431-abe25a789d74") : secret "webhook-server-cert" not found Jan 26 17:13:31 crc kubenswrapper[4865]: I0126 17:13:31.162254 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-b8b6d4659-xgdcd"] Jan 26 17:13:31 crc kubenswrapper[4865]: I0126 17:13:31.167684 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-594c8c9d5d-c2786"] Jan 26 17:13:31 crc kubenswrapper[4865]: I0126 17:13:31.173963 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-598f7747c9-kblgz"] Jan 26 17:13:31 crc kubenswrapper[4865]: I0126 17:13:31.177725 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-77d5c5b54f-t9v67"] Jan 26 17:13:31 crc kubenswrapper[4865]: I0126 17:13:31.253930 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-7478f7dbf9-8cwj6"] Jan 26 17:13:31 crc kubenswrapper[4865]: I0126 17:13:31.385303 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/374004dc-56e6-4af1-9d53-0a14477e623f-cert\") pod \"infra-operator-controller-manager-694cf4f878-6ljdm\" (UID: \"374004dc-56e6-4af1-9d53-0a14477e623f\") " pod="openstack-operators/infra-operator-controller-manager-694cf4f878-6ljdm" Jan 26 17:13:31 crc kubenswrapper[4865]: E0126 17:13:31.385485 4865 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 26 17:13:31 crc kubenswrapper[4865]: E0126 17:13:31.385532 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/374004dc-56e6-4af1-9d53-0a14477e623f-cert podName:374004dc-56e6-4af1-9d53-0a14477e623f nodeName:}" failed. 
No retries permitted until 2026-01-26 17:13:35.385517923 +0000 UTC m=+1142.969403510 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/374004dc-56e6-4af1-9d53-0a14477e623f-cert") pod "infra-operator-controller-manager-694cf4f878-6ljdm" (UID: "374004dc-56e6-4af1-9d53-0a14477e623f") : secret "infra-operator-webhook-server-cert" not found Jan 26 17:13:31 crc kubenswrapper[4865]: I0126 17:13:31.409773 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-7fff5bf694-l9xql"] Jan 26 17:13:31 crc kubenswrapper[4865]: I0126 17:13:31.451677 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-78d58447c5-psb52"] Jan 26 17:13:31 crc kubenswrapper[4865]: I0126 17:13:31.464944 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-5f4cd88d46-7c4wx"] Jan 26 17:13:31 crc kubenswrapper[4865]: I0126 17:13:31.489366 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-79d5ccc684-v2ff5"] Jan 26 17:13:31 crc kubenswrapper[4865]: W0126 17:13:31.495907 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9532dd1b_a2e9_4e25_b6ae_55f9d9e42859.slice/crio-e4c4b3fe1a3c4101e9340dc90259da09fdf75ac27b7cbe9a1726b63ad513299f WatchSource:0}: Error finding container e4c4b3fe1a3c4101e9340dc90259da09fdf75ac27b7cbe9a1726b63ad513299f: Status 404 returned error can't find the container with id e4c4b3fe1a3c4101e9340dc90259da09fdf75ac27b7cbe9a1726b63ad513299f Jan 26 17:13:31 crc kubenswrapper[4865]: I0126 17:13:31.497200 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-6b9fb5fdcb-d2544"] Jan 26 17:13:31 crc kubenswrapper[4865]: W0126 17:13:31.499117 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod40656d26_d2a5_4728_a67c_1880fb430675.slice/crio-3d907a0c4607a65da2b52d74b443d37aaec0d73cb846db8228ca7eb3077f9a03 WatchSource:0}: Error finding container 3d907a0c4607a65da2b52d74b443d37aaec0d73cb846db8228ca7eb3077f9a03: Status 404 returned error can't find the container with id 3d907a0c4607a65da2b52d74b443d37aaec0d73cb846db8228ca7eb3077f9a03 Jan 26 17:13:31 crc kubenswrapper[4865]: W0126 17:13:31.499536 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc0022f58_7356_41ad_a2da_362c9f9bfd73.slice/crio-d6486b4bbc28338df0de739db79a4a49c0ebf8b6c56e6c53759ea489218871b0 WatchSource:0}: Error finding container d6486b4bbc28338df0de739db79a4a49c0ebf8b6c56e6c53759ea489218871b0: Status 404 returned error can't find the container with id d6486b4bbc28338df0de739db79a4a49c0ebf8b6c56e6c53759ea489218871b0 Jan 26 17:13:31 crc kubenswrapper[4865]: I0126 17:13:31.503096 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-78c6999f6f-jdbmf"] Jan 26 17:13:31 crc kubenswrapper[4865]: I0126 17:13:31.594577 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-85cd9769bb-gwrzm"] Jan 26 17:13:31 crc kubenswrapper[4865]: I0126 17:13:31.676595 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack-operators/swift-operator-controller-manager-547cbdb99f-lb26q"] Jan 26 17:13:31 crc kubenswrapper[4865]: W0126 17:13:31.680309 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod00f471fe_001b_40e6_91be_92a8f71bc951.slice/crio-190a740a7a0a275704b2fc4ffd1a16036978f2c46923a8daa0f1ddf4f7cb36aa WatchSource:0}: Error finding container 190a740a7a0a275704b2fc4ffd1a16036978f2c46923a8daa0f1ddf4f7cb36aa: Status 404 returned error can't find the container with id 190a740a7a0a275704b2fc4ffd1a16036978f2c46923a8daa0f1ddf4f7cb36aa Jan 26 17:13:31 crc kubenswrapper[4865]: I0126 17:13:31.682394 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-l5z5s" event={"ID":"447fe2d4-cd26-4a3b-9cb0-5834991e70f4","Type":"ContainerStarted","Data":"f1613f1b0fc65833489c4d12c9434f9be20f2667cdebc6ce3b044af07b5514d8"} Jan 26 17:13:31 crc kubenswrapper[4865]: W0126 17:13:31.683167 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod68710299_edf4_4c26_b035_a33e03f89d5f.slice/crio-8277d02b225820d42bf1c2e37164283f7e61890adc403757c5c5315b7d5d5b6a WatchSource:0}: Error finding container 8277d02b225820d42bf1c2e37164283f7e61890adc403757c5c5315b7d5d5b6a: Status 404 returned error can't find the container with id 8277d02b225820d42bf1c2e37164283f7e61890adc403757c5c5315b7d5d5b6a Jan 26 17:13:31 crc kubenswrapper[4865]: I0126 17:13:31.683317 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-564965969-pksm4"] Jan 26 17:13:31 crc kubenswrapper[4865]: E0126 17:13:31.683586 4865 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:445e951df2f21df6d33a466f75917e0f6103052ae751ae11887136e8ab165922,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-hdd86,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
Jan 26 17:13:31 crc kubenswrapper[4865]: I0126 17:13:31.684181 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-7478f7dbf9-8cwj6" event={"ID":"b37c27a9-7292-41a4-92bb-584e9b492aa0","Type":"ContainerStarted","Data":"b16f78906d02256e06a015bf18a04fad1dca6441ed24060fc0474402b0f030ce"}
Jan 26 17:13:31 crc kubenswrapper[4865]: E0126 17:13:31.684905 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-lb26q" podUID="00f471fe-001b-40e6-91be-92a8f71bc951"
Jan 26 17:13:31 crc kubenswrapper[4865]: I0126 17:13:31.686857 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-c2786" event={"ID":"fb50fe13-a8bb-4115-85ab-677105257e40","Type":"ContainerStarted","Data":"7c306a0301038d446b4878f3a8f4cb28ce6ada7fee1300899ac02bc21663aa63"}
Jan 26 17:13:31 crc kubenswrapper[4865]: I0126 17:13:31.689111 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-gwrzm" event={"ID":"e9f2346a-220f-4638-8faa-c2eba1914417","Type":"ContainerStarted","Data":"38ba5ba9d6970e9dd7818dac82e5c7bb208f168064556aff595ea82d95096ec2"}
Jan 26 17:13:31 crc kubenswrapper[4865]: I0126 17:13:31.692017 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-79d5ccc684-v2ff5" event={"ID":"c0022f58-7356-41ad-a2da-362c9f9bfd73","Type":"ContainerStarted","Data":"d6486b4bbc28338df0de739db79a4a49c0ebf8b6c56e6c53759ea489218871b0"}
Jan 26 17:13:31 crc kubenswrapper[4865]: W0126 17:13:31.692101 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddd2b4860_4a17_43e4_9909_afbe647519c4.slice/crio-b398879afebcc3ec2e9103b95e81c11b593929aa923ba96e0daf526f4776fe13 WatchSource:0}: Error finding container b398879afebcc3ec2e9103b95e81c11b593929aa923ba96e0daf526f4776fe13: Status 404 returned error can't find the container with id b398879afebcc3ec2e9103b95e81c11b593929aa923ba96e0daf526f4776fe13
Jan 26 17:13:31 crc kubenswrapper[4865]: I0126 17:13:31.692659 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-9f7sb"]
Jan 26 17:13:31 crc kubenswrapper[4865]: E0126 17:13:31.696412 4865 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:fa46fc14710961e6b4a76a3522dca3aa3cfa71436c7cf7ade533d3712822f327,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-7dmtp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-6f75f45d54-zb94x_openstack-operators(f7123b2b-df5a-4ae2-bfe6-f067fc318b5b): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Jan 26 17:13:31 crc kubenswrapper[4865]: I0126 17:13:31.696604 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-jdbmf" event={"ID":"9532dd1b-a2e9-4e25-b6ae-55f9d9e42859","Type":"ContainerStarted","Data":"e4c4b3fe1a3c4101e9340dc90259da09fdf75ac27b7cbe9a1726b63ad513299f"}
Jan 26 17:13:31 crc kubenswrapper[4865]: E0126 17:13:31.696437 4865 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-29kf9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-9f7sb_openstack-operators(dd2b4860-4a17-43e4-9909-afbe647519c4): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Jan 26 17:13:31 crc kubenswrapper[4865]: E0126 17:13:31.696760 4865 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:7869203f6f97de780368d507636031090fed3b658d2f7771acbd4481bdfc870b,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-zrgdv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-564965969-pksm4_openstack-operators(68710299-edf4-4c26-b035-a33e03f89d5f): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Jan 26 17:13:31 crc kubenswrapper[4865]: E0126 17:13:31.697609 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/ovn-operator-controller-manager-6f75f45d54-zb94x" podUID="f7123b2b-df5a-4ae2-bfe6-f067fc318b5b"
Jan 26 17:13:31 crc kubenswrapper[4865]: I0126 17:13:31.697982 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-7fff5bf694-l9xql" event={"ID":"6fe7b69f-8c3c-413a-b838-9abd3708a60c","Type":"ContainerStarted","Data":"1ccf8446fb351b097f1b0c6d651df6496b1be88ad0fcceef4c8cd769a5c2fc15"}
Jan 26 17:13:31 crc kubenswrapper[4865]: E0126 17:13:31.697973 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-9f7sb" podUID="dd2b4860-4a17-43e4-9909-afbe647519c4"
Jan 26 17:13:31 crc kubenswrapper[4865]: E0126 17:13:31.698078 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/watcher-operator-controller-manager-564965969-pksm4" podUID="68710299-edf4-4c26-b035-a33e03f89d5f"
Jan 26 17:13:31 crc kubenswrapper[4865]: I0126 17:13:31.699582 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-78d58447c5-psb52" event={"ID":"e65ea7be-88a0-4d65-ac76-b1956c034abd","Type":"ContainerStarted","Data":"51ddb1bc1b18428fba9f84034c483df658ec8d523db88b4c1678b6c0baeeb1bc"}
Jan 26 17:13:31 crc kubenswrapper[4865]: W0126 17:13:31.700052 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfcf22a02_e91a_43a4_9e2b_738373b94769.slice/crio-690e5f89d8aac4743a540cccc02970e416067a303ad552878317f6e9f688ddbd WatchSource:0}: Error finding container 690e5f89d8aac4743a540cccc02970e416067a303ad552878317f6e9f688ddbd: Status 404 returned error can't find the container with id 690e5f89d8aac4743a540cccc02970e416067a303ad552878317f6e9f688ddbd
Jan 26 17:13:31 crc kubenswrapper[4865]: I0126 17:13:31.701397 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-6b9fb5fdcb-d2544" event={"ID":"40656d26-d2a5-4728-a67c-1880fb430675","Type":"ContainerStarted","Data":"3d907a0c4607a65da2b52d74b443d37aaec0d73cb846db8228ca7eb3077f9a03"}
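After each failed pull above, the pod worker logs "Error syncing pod, skipping" and defers to the next sync; once the per-image backoff window opens, subsequent syncs report ImagePullBackOff instead of retrying the pull. A minimal Go sketch of that ErrImagePull-to-ImagePullBackOff progression follows; the 10s initial window and 5m cap are assumptions for illustration.

package main

import (
	"fmt"
	"time"
)

// backoff tracks the per-image pull backoff window.
type backoff struct {
	until time.Time
	delay time.Duration
}

// syncPull models one pod sync: inside the window it reports
// ImagePullBackOff; otherwise the pull attempt fails (as in this log)
// and the window is opened or doubled.
func (b *backoff) syncPull(now time.Time) error {
	if now.Before(b.until) {
		return fmt.Errorf("ImagePullBackOff: back-off pulling image")
	}
	if b.delay == 0 {
		b.delay = 10 * time.Second // assumed initial window
	} else if b.delay < 5*time.Minute {
		b.delay *= 2 // assumed doubling, capped at 5m
	}
	b.until = now.Add(b.delay)
	return fmt.Errorf("ErrImagePull: pull QPS exceeded")
}

func main() {
	var b backoff
	now := time.Now()
	for i := 0; i < 4; i++ {
		fmt.Println(b.syncPull(now))
		now = now.Add(3 * time.Second) // syncs arrive faster than the window expires
	}
}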
Jan 26 17:13:31 crc kubenswrapper[4865]: I0126 17:13:31.702674 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-6f75f45d54-zb94x"]
Jan 26 17:13:31 crc kubenswrapper[4865]: E0126 17:13:31.703082 4865 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:c8dde42dafd41026ed2e4cfc26efc0fff63c4ba9d31326ae7dc644ccceaafa9d,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-l2xnf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-69797bbcbd-pk5pz_openstack-operators(fcf22a02-e91a-43a4-9e2b-738373b94769): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Jan 26 17:13:31 crc kubenswrapper[4865]: I0126 17:13:31.703656 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-xgdcd" event={"ID":"de67db31-b314-42ed-ac57-68f9fc5aab7c","Type":"ContainerStarted","Data":"908ace2c59c68262cda3981b0382e256e538bf40b66ea0aae2ce2f241a83600a"}
Jan 26 17:13:31 crc kubenswrapper[4865]: E0126 17:13:31.704557 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-pk5pz" podUID="fcf22a02-e91a-43a4-9e2b-738373b94769"
Jan 26 17:13:31 crc kubenswrapper[4865]: I0126 17:13:31.705248 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-t9v67" event={"ID":"796eb6f5-8010-4397-9328-de3b605107be","Type":"ContainerStarted","Data":"0f04f0c1f10f5fd9780b4e99e590c120a4c8183bf19e1dde22e472db13e6b833"}
Jan 26 17:13:31 crc kubenswrapper[4865]: I0126 17:13:31.709256 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-69797bbcbd-pk5pz"]
Jan 26 17:13:31 crc kubenswrapper[4865]: I0126 17:13:31.709546 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-5f4cd88d46-7c4wx" event={"ID":"d0457508-04e0-4b3e-a84a-097a47ee346e","Type":"ContainerStarted","Data":"19b79244806dd84fc4825921b884e4a57846f614d5226b34cbb27ed324d0356f"}
Jan 26 17:13:31 crc kubenswrapper[4865]: I0126 17:13:31.711718 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-598f7747c9-kblgz" event={"ID":"1f630a72-d9f3-4af8-9b61-11b0551ad19f","Type":"ContainerStarted","Data":"0fb3d3c2c67ac6e45576ce1cf2a6be762c3cb423b0f4a30e4898d1a0c418b491"}
Jan 26 17:13:31 crc kubenswrapper[4865]: I0126 17:13:31.891438 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4444365f-5137-411d-ba00-25c9f9a7390f-cert\") pod \"openstack-baremetal-operator-controller-manager-6b68b8b854hhdtm\" (UID: \"4444365f-5137-411d-ba00-25c9f9a7390f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854hhdtm"
Jan 26 17:13:31 crc kubenswrapper[4865]: E0126 17:13:31.891708 4865 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Jan 26 17:13:31 crc kubenswrapper[4865]: E0126 17:13:31.891783 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4444365f-5137-411d-ba00-25c9f9a7390f-cert podName:4444365f-5137-411d-ba00-25c9f9a7390f nodeName:}" failed. No retries permitted until 2026-01-26 17:13:35.891759658 +0000 UTC m=+1143.475645245 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/4444365f-5137-411d-ba00-25c9f9a7390f-cert") pod "openstack-baremetal-operator-controller-manager-6b68b8b854hhdtm" (UID: "4444365f-5137-411d-ba00-25c9f9a7390f") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Jan 26 17:13:32 crc kubenswrapper[4865]: I0126 17:13:32.728165 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-lb26q" event={"ID":"00f471fe-001b-40e6-91be-92a8f71bc951","Type":"ContainerStarted","Data":"190a740a7a0a275704b2fc4ffd1a16036978f2c46923a8daa0f1ddf4f7cb36aa"}
Jan 26 17:13:32 crc kubenswrapper[4865]: E0126 17:13:32.732869 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:445e951df2f21df6d33a466f75917e0f6103052ae751ae11887136e8ab165922\\\"\"" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-lb26q" podUID="00f471fe-001b-40e6-91be-92a8f71bc951"
Jan 26 17:13:32 crc kubenswrapper[4865]: I0126 17:13:32.752498 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-6f75f45d54-zb94x" event={"ID":"f7123b2b-df5a-4ae2-bfe6-f067fc318b5b","Type":"ContainerStarted","Data":"6bd96b7d507ea3d063956125f61858ad8acc7a600b1093c8b341a3ca2027f66f"}
Jan 26 17:13:32 crc kubenswrapper[4865]: E0126 17:13:32.754300 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:fa46fc14710961e6b4a76a3522dca3aa3cfa71436c7cf7ade533d3712822f327\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-6f75f45d54-zb94x" podUID="f7123b2b-df5a-4ae2-bfe6-f067fc318b5b"
Jan 26 17:13:32 crc kubenswrapper[4865]: I0126 17:13:32.763501 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-9f7sb" event={"ID":"dd2b4860-4a17-43e4-9909-afbe647519c4","Type":"ContainerStarted","Data":"b398879afebcc3ec2e9103b95e81c11b593929aa923ba96e0daf526f4776fe13"}
Jan 26 17:13:32 crc kubenswrapper[4865]: E0126 17:13:32.765892 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-9f7sb" podUID="dd2b4860-4a17-43e4-9909-afbe647519c4"
Jan 26 17:13:32 crc kubenswrapper[4865]: I0126 17:13:32.789058 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-pk5pz" event={"ID":"fcf22a02-e91a-43a4-9e2b-738373b94769","Type":"ContainerStarted","Data":"690e5f89d8aac4743a540cccc02970e416067a303ad552878317f6e9f688ddbd"}
Jan 26 17:13:32 crc kubenswrapper[4865]: E0126 17:13:32.790894 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:c8dde42dafd41026ed2e4cfc26efc0fff63c4ba9d31326ae7dc644ccceaafa9d\\\"\"" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-pk5pz" podUID="fcf22a02-e91a-43a4-9e2b-738373b94769"
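For triaging an artifact like this, the repeating "secret ... not found" retries are easiest to tally mechanically. A hypothetical helper (name and approach are assumptions, not part of this job's tooling) that counts them per missing secret from a kubelet log on stdin, e.g. zcat kubelet.log.gz | go run triage.go:

package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

func main() {
	// Matches the secret.go:188 / MountVolume.SetUp failure text seen above.
	re := regexp.MustCompile(`secret "([^"]+)" not found`)
	counts := map[string]int{}
	sc := bufio.NewScanner(os.Stdin)
	// Entries carrying full container specs can be very long; grow the buffer.
	sc.Buffer(make([]byte, 1024*1024), 1024*1024)
	for sc.Scan() {
		if m := re.FindStringSubmatch(sc.Text()); m != nil {
			counts[m[1]]++
		}
	}
	for name, n := range counts {
		fmt.Printf("%-55s %d occurrences\n", name, n)
	}
}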
podUID="fcf22a02-e91a-43a4-9e2b-738373b94769" Jan 26 17:13:32 crc kubenswrapper[4865]: I0126 17:13:32.793107 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-564965969-pksm4" event={"ID":"68710299-edf4-4c26-b035-a33e03f89d5f","Type":"ContainerStarted","Data":"8277d02b225820d42bf1c2e37164283f7e61890adc403757c5c5315b7d5d5b6a"} Jan 26 17:13:32 crc kubenswrapper[4865]: E0126 17:13:32.794675 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:7869203f6f97de780368d507636031090fed3b658d2f7771acbd4481bdfc870b\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-564965969-pksm4" podUID="68710299-edf4-4c26-b035-a33e03f89d5f" Jan 26 17:13:33 crc kubenswrapper[4865]: I0126 17:13:33.163423 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ab8ab562-a119-48c2-b431-abe25a789d74-metrics-certs\") pod \"openstack-operator-controller-manager-7fc674d489-dq2vn\" (UID: \"ab8ab562-a119-48c2-b431-abe25a789d74\") " pod="openstack-operators/openstack-operator-controller-manager-7fc674d489-dq2vn" Jan 26 17:13:33 crc kubenswrapper[4865]: I0126 17:13:33.164396 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ab8ab562-a119-48c2-b431-abe25a789d74-webhook-certs\") pod \"openstack-operator-controller-manager-7fc674d489-dq2vn\" (UID: \"ab8ab562-a119-48c2-b431-abe25a789d74\") " pod="openstack-operators/openstack-operator-controller-manager-7fc674d489-dq2vn" Jan 26 17:13:33 crc kubenswrapper[4865]: E0126 17:13:33.163658 4865 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 26 17:13:33 crc kubenswrapper[4865]: E0126 17:13:33.164795 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ab8ab562-a119-48c2-b431-abe25a789d74-metrics-certs podName:ab8ab562-a119-48c2-b431-abe25a789d74 nodeName:}" failed. No retries permitted until 2026-01-26 17:13:37.16474666 +0000 UTC m=+1144.748632247 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ab8ab562-a119-48c2-b431-abe25a789d74-metrics-certs") pod "openstack-operator-controller-manager-7fc674d489-dq2vn" (UID: "ab8ab562-a119-48c2-b431-abe25a789d74") : secret "metrics-server-cert" not found Jan 26 17:13:33 crc kubenswrapper[4865]: E0126 17:13:33.164638 4865 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 26 17:13:33 crc kubenswrapper[4865]: E0126 17:13:33.165206 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ab8ab562-a119-48c2-b431-abe25a789d74-webhook-certs podName:ab8ab562-a119-48c2-b431-abe25a789d74 nodeName:}" failed. No retries permitted until 2026-01-26 17:13:37.165184943 +0000 UTC m=+1144.749070530 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/ab8ab562-a119-48c2-b431-abe25a789d74-webhook-certs") pod "openstack-operator-controller-manager-7fc674d489-dq2vn" (UID: "ab8ab562-a119-48c2-b431-abe25a789d74") : secret "webhook-server-cert" not found Jan 26 17:13:33 crc kubenswrapper[4865]: E0126 17:13:33.805191 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:fa46fc14710961e6b4a76a3522dca3aa3cfa71436c7cf7ade533d3712822f327\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-6f75f45d54-zb94x" podUID="f7123b2b-df5a-4ae2-bfe6-f067fc318b5b" Jan 26 17:13:33 crc kubenswrapper[4865]: E0126 17:13:33.805270 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-9f7sb" podUID="dd2b4860-4a17-43e4-9909-afbe647519c4" Jan 26 17:13:33 crc kubenswrapper[4865]: E0126 17:13:33.805335 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:445e951df2f21df6d33a466f75917e0f6103052ae751ae11887136e8ab165922\\\"\"" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-lb26q" podUID="00f471fe-001b-40e6-91be-92a8f71bc951" Jan 26 17:13:33 crc kubenswrapper[4865]: E0126 17:13:33.806147 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:c8dde42dafd41026ed2e4cfc26efc0fff63c4ba9d31326ae7dc644ccceaafa9d\\\"\"" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-pk5pz" podUID="fcf22a02-e91a-43a4-9e2b-738373b94769" Jan 26 17:13:33 crc kubenswrapper[4865]: E0126 17:13:33.806227 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:7869203f6f97de780368d507636031090fed3b658d2f7771acbd4481bdfc870b\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-564965969-pksm4" podUID="68710299-edf4-4c26-b035-a33e03f89d5f" Jan 26 17:13:35 crc kubenswrapper[4865]: I0126 17:13:35.406015 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/374004dc-56e6-4af1-9d53-0a14477e623f-cert\") pod \"infra-operator-controller-manager-694cf4f878-6ljdm\" (UID: \"374004dc-56e6-4af1-9d53-0a14477e623f\") " pod="openstack-operators/infra-operator-controller-manager-694cf4f878-6ljdm" Jan 26 17:13:35 crc kubenswrapper[4865]: E0126 17:13:35.406279 4865 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 26 17:13:35 crc kubenswrapper[4865]: E0126 17:13:35.407111 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/374004dc-56e6-4af1-9d53-0a14477e623f-cert podName:374004dc-56e6-4af1-9d53-0a14477e623f nodeName:}" failed. 
No retries permitted until 2026-01-26 17:13:43.407075726 +0000 UTC m=+1150.990961313 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/374004dc-56e6-4af1-9d53-0a14477e623f-cert") pod "infra-operator-controller-manager-694cf4f878-6ljdm" (UID: "374004dc-56e6-4af1-9d53-0a14477e623f") : secret "infra-operator-webhook-server-cert" not found Jan 26 17:13:35 crc kubenswrapper[4865]: I0126 17:13:35.916693 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4444365f-5137-411d-ba00-25c9f9a7390f-cert\") pod \"openstack-baremetal-operator-controller-manager-6b68b8b854hhdtm\" (UID: \"4444365f-5137-411d-ba00-25c9f9a7390f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854hhdtm" Jan 26 17:13:35 crc kubenswrapper[4865]: E0126 17:13:35.916875 4865 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 26 17:13:35 crc kubenswrapper[4865]: E0126 17:13:35.916941 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4444365f-5137-411d-ba00-25c9f9a7390f-cert podName:4444365f-5137-411d-ba00-25c9f9a7390f nodeName:}" failed. No retries permitted until 2026-01-26 17:13:43.916922393 +0000 UTC m=+1151.500807980 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/4444365f-5137-411d-ba00-25c9f9a7390f-cert") pod "openstack-baremetal-operator-controller-manager-6b68b8b854hhdtm" (UID: "4444365f-5137-411d-ba00-25c9f9a7390f") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 26 17:13:37 crc kubenswrapper[4865]: I0126 17:13:37.238360 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ab8ab562-a119-48c2-b431-abe25a789d74-metrics-certs\") pod \"openstack-operator-controller-manager-7fc674d489-dq2vn\" (UID: \"ab8ab562-a119-48c2-b431-abe25a789d74\") " pod="openstack-operators/openstack-operator-controller-manager-7fc674d489-dq2vn" Jan 26 17:13:37 crc kubenswrapper[4865]: I0126 17:13:37.238459 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ab8ab562-a119-48c2-b431-abe25a789d74-webhook-certs\") pod \"openstack-operator-controller-manager-7fc674d489-dq2vn\" (UID: \"ab8ab562-a119-48c2-b431-abe25a789d74\") " pod="openstack-operators/openstack-operator-controller-manager-7fc674d489-dq2vn" Jan 26 17:13:37 crc kubenswrapper[4865]: E0126 17:13:37.238588 4865 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 26 17:13:37 crc kubenswrapper[4865]: E0126 17:13:37.238643 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ab8ab562-a119-48c2-b431-abe25a789d74-webhook-certs podName:ab8ab562-a119-48c2-b431-abe25a789d74 nodeName:}" failed. No retries permitted until 2026-01-26 17:13:45.238628441 +0000 UTC m=+1152.822514028 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/ab8ab562-a119-48c2-b431-abe25a789d74-webhook-certs") pod "openstack-operator-controller-manager-7fc674d489-dq2vn" (UID: "ab8ab562-a119-48c2-b431-abe25a789d74") : secret "webhook-server-cert" not found Jan 26 17:13:37 crc kubenswrapper[4865]: E0126 17:13:37.238589 4865 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 26 17:13:37 crc kubenswrapper[4865]: E0126 17:13:37.238735 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ab8ab562-a119-48c2-b431-abe25a789d74-metrics-certs podName:ab8ab562-a119-48c2-b431-abe25a789d74 nodeName:}" failed. No retries permitted until 2026-01-26 17:13:45.238717903 +0000 UTC m=+1152.822603490 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ab8ab562-a119-48c2-b431-abe25a789d74-metrics-certs") pod "openstack-operator-controller-manager-7fc674d489-dq2vn" (UID: "ab8ab562-a119-48c2-b431-abe25a789d74") : secret "metrics-server-cert" not found Jan 26 17:13:43 crc kubenswrapper[4865]: I0126 17:13:43.486403 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/374004dc-56e6-4af1-9d53-0a14477e623f-cert\") pod \"infra-operator-controller-manager-694cf4f878-6ljdm\" (UID: \"374004dc-56e6-4af1-9d53-0a14477e623f\") " pod="openstack-operators/infra-operator-controller-manager-694cf4f878-6ljdm" Jan 26 17:13:43 crc kubenswrapper[4865]: E0126 17:13:43.486687 4865 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 26 17:13:43 crc kubenswrapper[4865]: E0126 17:13:43.487206 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/374004dc-56e6-4af1-9d53-0a14477e623f-cert podName:374004dc-56e6-4af1-9d53-0a14477e623f nodeName:}" failed. No retries permitted until 2026-01-26 17:13:59.487185123 +0000 UTC m=+1167.071070710 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/374004dc-56e6-4af1-9d53-0a14477e623f-cert") pod "infra-operator-controller-manager-694cf4f878-6ljdm" (UID: "374004dc-56e6-4af1-9d53-0a14477e623f") : secret "infra-operator-webhook-server-cert" not found Jan 26 17:13:43 crc kubenswrapper[4865]: I0126 17:13:43.994192 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4444365f-5137-411d-ba00-25c9f9a7390f-cert\") pod \"openstack-baremetal-operator-controller-manager-6b68b8b854hhdtm\" (UID: \"4444365f-5137-411d-ba00-25c9f9a7390f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854hhdtm" Jan 26 17:13:43 crc kubenswrapper[4865]: E0126 17:13:43.994476 4865 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 26 17:13:43 crc kubenswrapper[4865]: E0126 17:13:43.994539 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4444365f-5137-411d-ba00-25c9f9a7390f-cert podName:4444365f-5137-411d-ba00-25c9f9a7390f nodeName:}" failed. No retries permitted until 2026-01-26 17:13:59.994524379 +0000 UTC m=+1167.578409966 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/4444365f-5137-411d-ba00-25c9f9a7390f-cert") pod "openstack-baremetal-operator-controller-manager-6b68b8b854hhdtm" (UID: "4444365f-5137-411d-ba00-25c9f9a7390f") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 26 17:13:45 crc kubenswrapper[4865]: I0126 17:13:45.317864 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ab8ab562-a119-48c2-b431-abe25a789d74-metrics-certs\") pod \"openstack-operator-controller-manager-7fc674d489-dq2vn\" (UID: \"ab8ab562-a119-48c2-b431-abe25a789d74\") " pod="openstack-operators/openstack-operator-controller-manager-7fc674d489-dq2vn" Jan 26 17:13:45 crc kubenswrapper[4865]: I0126 17:13:45.318031 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ab8ab562-a119-48c2-b431-abe25a789d74-webhook-certs\") pod \"openstack-operator-controller-manager-7fc674d489-dq2vn\" (UID: \"ab8ab562-a119-48c2-b431-abe25a789d74\") " pod="openstack-operators/openstack-operator-controller-manager-7fc674d489-dq2vn" Jan 26 17:13:45 crc kubenswrapper[4865]: E0126 17:13:45.318198 4865 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 26 17:13:45 crc kubenswrapper[4865]: E0126 17:13:45.318341 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ab8ab562-a119-48c2-b431-abe25a789d74-metrics-certs podName:ab8ab562-a119-48c2-b431-abe25a789d74 nodeName:}" failed. No retries permitted until 2026-01-26 17:14:01.318311225 +0000 UTC m=+1168.902196812 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ab8ab562-a119-48c2-b431-abe25a789d74-metrics-certs") pod "openstack-operator-controller-manager-7fc674d489-dq2vn" (UID: "ab8ab562-a119-48c2-b431-abe25a789d74") : secret "metrics-server-cert" not found Jan 26 17:13:45 crc kubenswrapper[4865]: E0126 17:13:45.318332 4865 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 26 17:13:45 crc kubenswrapper[4865]: E0126 17:13:45.318474 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ab8ab562-a119-48c2-b431-abe25a789d74-webhook-certs podName:ab8ab562-a119-48c2-b431-abe25a789d74 nodeName:}" failed. No retries permitted until 2026-01-26 17:14:01.318446619 +0000 UTC m=+1168.902332206 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/ab8ab562-a119-48c2-b431-abe25a789d74-webhook-certs") pod "openstack-operator-controller-manager-7fc674d489-dq2vn" (UID: "ab8ab562-a119-48c2-b431-abe25a789d74") : secret "webhook-server-cert" not found Jan 26 17:13:45 crc kubenswrapper[4865]: I0126 17:13:45.360241 4865 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 26 17:13:46 crc kubenswrapper[4865]: E0126 17:13:46.084599 4865 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/manila-operator@sha256:8bee4480babd6fd8f686e0ba52a304acb6ffb90f09c7c57e7f5df5f7658836d8" Jan 26 17:13:46 crc kubenswrapper[4865]: E0126 17:13:46.085009 4865 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/manila-operator@sha256:8bee4480babd6fd8f686e0ba52a304acb6ffb90f09c7c57e7f5df5f7658836d8,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-xz7rg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-78c6999f6f-jdbmf_openstack-operators(9532dd1b-a2e9-4e25-b6ae-55f9d9e42859): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 26 17:13:46 crc kubenswrapper[4865]: E0126 17:13:46.086221 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with 
ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-jdbmf" podUID="9532dd1b-a2e9-4e25-b6ae-55f9d9e42859" Jan 26 17:13:46 crc kubenswrapper[4865]: E0126 17:13:46.122606 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/manila-operator@sha256:8bee4480babd6fd8f686e0ba52a304acb6ffb90f09c7c57e7f5df5f7658836d8\\\"\"" pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-jdbmf" podUID="9532dd1b-a2e9-4e25-b6ae-55f9d9e42859" Jan 26 17:13:46 crc kubenswrapper[4865]: E0126 17:13:46.817066 4865 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ironic-operator@sha256:4d55bd6418df3f63f4d3fe47bebf3f5498a520b3e14af98fe16c85ef9fd54d5e" Jan 26 17:13:46 crc kubenswrapper[4865]: E0126 17:13:46.817340 4865 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ironic-operator@sha256:4d55bd6418df3f63f4d3fe47bebf3f5498a520b3e14af98fe16c85ef9fd54d5e,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-6lqrk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ironic-operator-controller-manager-598f7747c9-kblgz_openstack-operators(1f630a72-d9f3-4af8-9b61-11b0551ad19f): ErrImagePull: rpc error: code = Canceled desc = copying config: 
context canceled" logger="UnhandledError" Jan 26 17:13:46 crc kubenswrapper[4865]: E0126 17:13:46.818654 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ironic-operator-controller-manager-598f7747c9-kblgz" podUID="1f630a72-d9f3-4af8-9b61-11b0551ad19f" Jan 26 17:13:47 crc kubenswrapper[4865]: E0126 17:13:47.133632 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ironic-operator@sha256:4d55bd6418df3f63f4d3fe47bebf3f5498a520b3e14af98fe16c85ef9fd54d5e\\\"\"" pod="openstack-operators/ironic-operator-controller-manager-598f7747c9-kblgz" podUID="1f630a72-d9f3-4af8-9b61-11b0551ad19f" Jan 26 17:13:47 crc kubenswrapper[4865]: E0126 17:13:47.570718 4865 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/cinder-operator@sha256:b916c87806b7eadd83e0ca890c3c24fb990fc5beb48ddc4537e3384efd3e62f7" Jan 26 17:13:47 crc kubenswrapper[4865]: E0126 17:13:47.570922 4865 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/cinder-operator@sha256:b916c87806b7eadd83e0ca890c3c24fb990fc5beb48ddc4537e3384efd3e62f7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-hgzc7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-operator-controller-manager-7478f7dbf9-8cwj6_openstack-operators(b37c27a9-7292-41a4-92bb-584e9b492aa0): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 26 17:13:47 crc kubenswrapper[4865]: E0126 17:13:47.572194 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/cinder-operator-controller-manager-7478f7dbf9-8cwj6" podUID="b37c27a9-7292-41a4-92bb-584e9b492aa0" Jan 26 17:13:48 crc kubenswrapper[4865]: E0126 17:13:48.187718 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/cinder-operator@sha256:b916c87806b7eadd83e0ca890c3c24fb990fc5beb48ddc4537e3384efd3e62f7\\\"\"" pod="openstack-operators/cinder-operator-controller-manager-7478f7dbf9-8cwj6" podUID="b37c27a9-7292-41a4-92bb-584e9b492aa0" Jan 26 17:13:57 crc kubenswrapper[4865]: E0126 17:13:57.759155 4865 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/horizon-operator@sha256:3311e627bcb860d9443592a2c67078417318c9eb77d8ef4d07f9aa7027d46822" Jan 26 17:13:57 crc kubenswrapper[4865]: E0126 17:13:57.759954 4865 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/horizon-operator@sha256:3311e627bcb860d9443592a2c67078417318c9eb77d8ef4d07f9aa7027d46822,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-fvmlk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-operator-controller-manager-77d5c5b54f-t9v67_openstack-operators(796eb6f5-8010-4397-9328-de3b605107be): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 26 17:13:57 crc kubenswrapper[4865]: E0126 17:13:57.761159 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-t9v67" podUID="796eb6f5-8010-4397-9328-de3b605107be" Jan 26 17:13:58 crc kubenswrapper[4865]: E0126 17:13:58.283511 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/horizon-operator@sha256:3311e627bcb860d9443592a2c67078417318c9eb77d8ef4d07f9aa7027d46822\\\"\"" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-t9v67" podUID="796eb6f5-8010-4397-9328-de3b605107be" Jan 26 17:13:58 crc kubenswrapper[4865]: E0126 17:13:58.677979 4865 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/octavia-operator@sha256:ed489f21a0c72557d2da5a271808f19b7c7b85ef32fd9f4aa91bdbfc5bca3bdd" Jan 26 17:13:58 crc kubenswrapper[4865]: E0126 17:13:58.678236 4865 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:ed489f21a0c72557d2da5a271808f19b7c7b85ef32fd9f4aa91bdbfc5bca3bdd,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5hqdt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-5f4cd88d46-7c4wx_openstack-operators(d0457508-04e0-4b3e-a84a-097a47ee346e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 26 17:13:58 crc kubenswrapper[4865]: E0126 17:13:58.679964 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/octavia-operator-controller-manager-5f4cd88d46-7c4wx" podUID="d0457508-04e0-4b3e-a84a-097a47ee346e" Jan 26 17:13:59 crc kubenswrapper[4865]: E0126 17:13:59.148527 4865 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/placement-operator@sha256:013c0ad82d21a21c7eece5cd4b5d5c4b8eb410b6671ac33a6f3fb78c8510811d" Jan 26 17:13:59 crc kubenswrapper[4865]: E0126 17:13:59.148756 4865 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:013c0ad82d21a21c7eece5cd4b5d5c4b8eb410b6671ac33a6f3fb78c8510811d,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-4mrl4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-79d5ccc684-v2ff5_openstack-operators(c0022f58-7356-41ad-a2da-362c9f9bfd73): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 26 17:13:59 crc kubenswrapper[4865]: E0126 17:13:59.150147 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/placement-operator-controller-manager-79d5ccc684-v2ff5" podUID="c0022f58-7356-41ad-a2da-362c9f9bfd73" Jan 26 17:13:59 crc kubenswrapper[4865]: E0126 17:13:59.293690 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:ed489f21a0c72557d2da5a271808f19b7c7b85ef32fd9f4aa91bdbfc5bca3bdd\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-5f4cd88d46-7c4wx" podUID="d0457508-04e0-4b3e-a84a-097a47ee346e" Jan 26 17:13:59 crc kubenswrapper[4865]: E0126 17:13:59.295578 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:013c0ad82d21a21c7eece5cd4b5d5c4b8eb410b6671ac33a6f3fb78c8510811d\\\"\"" pod="openstack-operators/placement-operator-controller-manager-79d5ccc684-v2ff5" podUID="c0022f58-7356-41ad-a2da-362c9f9bfd73" Jan 26 17:13:59 crc kubenswrapper[4865]: I0126 17:13:59.523717 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/374004dc-56e6-4af1-9d53-0a14477e623f-cert\") pod \"infra-operator-controller-manager-694cf4f878-6ljdm\" (UID: \"374004dc-56e6-4af1-9d53-0a14477e623f\") " pod="openstack-operators/infra-operator-controller-manager-694cf4f878-6ljdm" Jan 26 17:13:59 crc kubenswrapper[4865]: I0126 17:13:59.543861 4865 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/374004dc-56e6-4af1-9d53-0a14477e623f-cert\") pod \"infra-operator-controller-manager-694cf4f878-6ljdm\" (UID: \"374004dc-56e6-4af1-9d53-0a14477e623f\") " pod="openstack-operators/infra-operator-controller-manager-694cf4f878-6ljdm" Jan 26 17:13:59 crc kubenswrapper[4865]: E0126 17:13:59.633773 4865 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/neutron-operator@sha256:816d474f502d730d6a2522a272b0e09a2d579ac63617817655d60c54bda4191e" Jan 26 17:13:59 crc kubenswrapper[4865]: E0126 17:13:59.633980 4865 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/neutron-operator@sha256:816d474f502d730d6a2522a272b0e09a2d579ac63617817655d60c54bda4191e,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-tc2zh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-78d58447c5-psb52_openstack-operators(e65ea7be-88a0-4d65-ac76-b1956c034abd): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 26 17:13:59 crc kubenswrapper[4865]: E0126 17:13:59.635670 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" 
pod="openstack-operators/neutron-operator-controller-manager-78d58447c5-psb52" podUID="e65ea7be-88a0-4d65-ac76-b1956c034abd" Jan 26 17:13:59 crc kubenswrapper[4865]: I0126 17:13:59.734853 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-mkvjx" Jan 26 17:13:59 crc kubenswrapper[4865]: I0126 17:13:59.743440 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-694cf4f878-6ljdm" Jan 26 17:14:00 crc kubenswrapper[4865]: I0126 17:14:00.031765 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4444365f-5137-411d-ba00-25c9f9a7390f-cert\") pod \"openstack-baremetal-operator-controller-manager-6b68b8b854hhdtm\" (UID: \"4444365f-5137-411d-ba00-25c9f9a7390f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854hhdtm" Jan 26 17:14:00 crc kubenswrapper[4865]: I0126 17:14:00.036459 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4444365f-5137-411d-ba00-25c9f9a7390f-cert\") pod \"openstack-baremetal-operator-controller-manager-6b68b8b854hhdtm\" (UID: \"4444365f-5137-411d-ba00-25c9f9a7390f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854hhdtm" Jan 26 17:14:00 crc kubenswrapper[4865]: I0126 17:14:00.248036 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-h2z45" Jan 26 17:14:00 crc kubenswrapper[4865]: I0126 17:14:00.256362 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854hhdtm" Jan 26 17:14:00 crc kubenswrapper[4865]: E0126 17:14:00.296817 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:816d474f502d730d6a2522a272b0e09a2d579ac63617817655d60c54bda4191e\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-78d58447c5-psb52" podUID="e65ea7be-88a0-4d65-ac76-b1956c034abd" Jan 26 17:14:00 crc kubenswrapper[4865]: E0126 17:14:00.765510 4865 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:8e340ff11922b38e811261de96982e1aff5f4eb8f225d1d9f5973025a4fe8349" Jan 26 17:14:00 crc kubenswrapper[4865]: E0126 17:14:00.766200 4865 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:8e340ff11922b38e811261de96982e1aff5f4eb8f225d1d9f5973025a4fe8349,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m 
DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-2z599,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-b8b6d4659-xgdcd_openstack-operators(de67db31-b314-42ed-ac57-68f9fc5aab7c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 26 17:14:00 crc kubenswrapper[4865]: E0126 17:14:00.767697 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-xgdcd" podUID="de67db31-b314-42ed-ac57-68f9fc5aab7c" Jan 26 17:14:01 crc kubenswrapper[4865]: E0126 17:14:01.304644 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:8e340ff11922b38e811261de96982e1aff5f4eb8f225d1d9f5973025a4fe8349\\\"\"" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-xgdcd" podUID="de67db31-b314-42ed-ac57-68f9fc5aab7c" Jan 26 17:14:01 crc kubenswrapper[4865]: E0126 17:14:01.327586 4865 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/heat-operator@sha256:2f9a2f064448faebbae58f52d564dc0e8e39bed0fc12bd6b9fe925e42f1b5492" Jan 26 17:14:01 crc kubenswrapper[4865]: E0126 17:14:01.327896 4865 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/heat-operator@sha256:2f9a2f064448faebbae58f52d564dc0e8e39bed0fc12bd6b9fe925e42f1b5492,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-zwhgg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-operator-controller-manager-594c8c9d5d-c2786_openstack-operators(fb50fe13-a8bb-4115-85ab-677105257e40): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 26 17:14:01 crc kubenswrapper[4865]: E0126 17:14:01.329102 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-c2786" podUID="fb50fe13-a8bb-4115-85ab-677105257e40" Jan 26 17:14:01 crc kubenswrapper[4865]: I0126 17:14:01.351841 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ab8ab562-a119-48c2-b431-abe25a789d74-metrics-certs\") pod \"openstack-operator-controller-manager-7fc674d489-dq2vn\" (UID: \"ab8ab562-a119-48c2-b431-abe25a789d74\") " pod="openstack-operators/openstack-operator-controller-manager-7fc674d489-dq2vn" Jan 26 17:14:01 crc kubenswrapper[4865]: I0126 17:14:01.351920 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ab8ab562-a119-48c2-b431-abe25a789d74-webhook-certs\") pod \"openstack-operator-controller-manager-7fc674d489-dq2vn\" (UID: \"ab8ab562-a119-48c2-b431-abe25a789d74\") " pod="openstack-operators/openstack-operator-controller-manager-7fc674d489-dq2vn" Jan 26 17:14:01 crc kubenswrapper[4865]: 
I0126 17:14:01.358575 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ab8ab562-a119-48c2-b431-abe25a789d74-metrics-certs\") pod \"openstack-operator-controller-manager-7fc674d489-dq2vn\" (UID: \"ab8ab562-a119-48c2-b431-abe25a789d74\") " pod="openstack-operators/openstack-operator-controller-manager-7fc674d489-dq2vn"
Jan 26 17:14:01 crc kubenswrapper[4865]: I0126 17:14:01.364896 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ab8ab562-a119-48c2-b431-abe25a789d74-webhook-certs\") pod \"openstack-operator-controller-manager-7fc674d489-dq2vn\" (UID: \"ab8ab562-a119-48c2-b431-abe25a789d74\") " pod="openstack-operators/openstack-operator-controller-manager-7fc674d489-dq2vn"
Jan 26 17:14:01 crc kubenswrapper[4865]: I0126 17:14:01.489009 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-jfvv6"
Jan 26 17:14:01 crc kubenswrapper[4865]: I0126 17:14:01.498148 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-7fc674d489-dq2vn"
Jan 26 17:14:02 crc kubenswrapper[4865]: E0126 17:14:02.021393 4865 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/mariadb-operator@sha256:b673f00227298dcfa89abb46f8296a0825add42da41e8a4bf4dd13367c738d84"
Jan 26 17:14:02 crc kubenswrapper[4865]: E0126 17:14:02.021642 4865 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/mariadb-operator@sha256:b673f00227298dcfa89abb46f8296a0825add42da41e8a4bf4dd13367c738d84,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-6hnqp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-6b9fb5fdcb-d2544_openstack-operators(40656d26-d2a5-4728-a67c-1880fb430675): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Jan 26 17:14:02 crc kubenswrapper[4865]: E0126 17:14:02.023077 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/mariadb-operator-controller-manager-6b9fb5fdcb-d2544" podUID="40656d26-d2a5-4728-a67c-1880fb430675"
Jan 26 17:14:02 crc kubenswrapper[4865]: E0126 17:14:02.311207 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/heat-operator@sha256:2f9a2f064448faebbae58f52d564dc0e8e39bed0fc12bd6b9fe925e42f1b5492\\\"\"" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-c2786" podUID="fb50fe13-a8bb-4115-85ab-677105257e40"
Jan 26 17:14:02 crc kubenswrapper[4865]: E0126 17:14:02.311296 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/mariadb-operator@sha256:b673f00227298dcfa89abb46f8296a0825add42da41e8a4bf4dd13367c738d84\\\"\"" pod="openstack-operators/mariadb-operator-controller-manager-6b9fb5fdcb-d2544" podUID="40656d26-d2a5-4728-a67c-1880fb430675"
Jan 26 17:14:04 crc kubenswrapper[4865]: I0126 17:14:04.511732 4865 patch_prober.go:28] interesting pod/machine-config-daemon-q8cb9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 26 17:14:04 crc kubenswrapper[4865]: I0126 17:14:04.512205 4865 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 26 17:14:05 crc kubenswrapper[4865]: E0126 17:14:05.319600 4865 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.22:5001/openstack-k8s-operators/nova-operator:bd20538f05b1bec15ff9ee74b9f39bcec3797f36"
Jan 26 17:14:05 crc kubenswrapper[4865]: E0126 17:14:05.320042 4865 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.22:5001/openstack-k8s-operators/nova-operator:bd20538f05b1bec15ff9ee74b9f39bcec3797f36"
Jan 26 17:14:05 crc kubenswrapper[4865]: E0126 17:14:05.320194 4865 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:38.102.83.22:5001/openstack-k8s-operators/nova-operator:bd20538f05b1bec15ff9ee74b9f39bcec3797f36,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-qhxgl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-7fff5bf694-l9xql_openstack-operators(6fe7b69f-8c3c-413a-b838-9abd3708a60c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Jan 26 17:14:05 crc kubenswrapper[4865]: E0126 17:14:05.322234 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/nova-operator-controller-manager-7fff5bf694-l9xql" podUID="6fe7b69f-8c3c-413a-b838-9abd3708a60c"
Jan 26 17:14:05 crc kubenswrapper[4865]: E0126 17:14:05.333305 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.22:5001/openstack-k8s-operators/nova-operator:bd20538f05b1bec15ff9ee74b9f39bcec3797f36\\\"\"" pod="openstack-operators/nova-operator-controller-manager-7fff5bf694-l9xql" podUID="6fe7b69f-8c3c-413a-b838-9abd3708a60c"
Jan 26 17:14:06 crc kubenswrapper[4865]: I0126 17:14:06.176197 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-694cf4f878-6ljdm"]
Jan 26 17:14:06 crc kubenswrapper[4865]: W0126 17:14:06.267940 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4444365f_5137_411d_ba00_25c9f9a7390f.slice/crio-b11d2378cfdd5015a85184a2c1ae4af6c9c1ab574765637ba3ff719dce97eb76 WatchSource:0}: Error finding container b11d2378cfdd5015a85184a2c1ae4af6c9c1ab574765637ba3ff719dce97eb76: Status 404 returned error can't find the container with id b11d2378cfdd5015a85184a2c1ae4af6c9c1ab574765637ba3ff719dce97eb76
Jan 26 17:14:06 crc kubenswrapper[4865]: I0126 17:14:06.271890 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854hhdtm"]
Jan 26 17:14:06 crc kubenswrapper[4865]: I0126 17:14:06.281373 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-7fc674d489-dq2vn"]
Jan 26 17:14:06 crc kubenswrapper[4865]: W0126 17:14:06.290880 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podab8ab562_a119_48c2_b431_abe25a789d74.slice/crio-e29db26b36f041c2988fcbc20f98feef79513e610a6185b40de060bc4b81bb54 WatchSource:0}: Error finding container e29db26b36f041c2988fcbc20f98feef79513e610a6185b40de060bc4b81bb54: Status 404 returned error can't find the container with id e29db26b36f041c2988fcbc20f98feef79513e610a6185b40de060bc4b81bb54
Jan 26 17:14:06 crc kubenswrapper[4865]: I0126 17:14:06.348517 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-598f7747c9-kblgz" event={"ID":"1f630a72-d9f3-4af8-9b61-11b0551ad19f","Type":"ContainerStarted","Data":"841a48c522489c0417eaeb396241cd5b370456915f494167d107302dee5d434f"}
Jan 26 17:14:06 crc kubenswrapper[4865]: I0126 17:14:06.349696 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-598f7747c9-kblgz"
Jan 26 17:14:06 crc kubenswrapper[4865]: I0126 17:14:06.355085 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-9f7sb" event={"ID":"dd2b4860-4a17-43e4-9909-afbe647519c4","Type":"ContainerStarted","Data":"8ce7bef3f53a61467537eec45280500b0d6e8a1befbe2f47e7a0ccdba342e0fe"}
Jan 26 17:14:06 crc kubenswrapper[4865]: I0126 17:14:06.371652 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-gwrzm"
Jan 26 17:14:06 crc kubenswrapper[4865]: I0126 17:14:06.371685 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-pk5pz" event={"ID":"fcf22a02-e91a-43a4-9e2b-738373b94769","Type":"ContainerStarted","Data":"c813a4cbd08ab845fbdb04a5a5bb5c82f34767075566eef908f2acdd1f31773e"}
Jan 26 17:14:06 crc kubenswrapper[4865]: I0126 17:14:06.371702 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-7478f7dbf9-8cwj6" event={"ID":"b37c27a9-7292-41a4-92bb-584e9b492aa0","Type":"ContainerStarted","Data":"f78c4ca3689ffa42e85dfc822ab776a590fba435285dc6ddc232210418f96c51"}
Jan 26 17:14:06 crc kubenswrapper[4865]: I0126 17:14:06.371712 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-gwrzm" event={"ID":"e9f2346a-220f-4638-8faa-c2eba1914417","Type":"ContainerStarted","Data":"f64b64b9253184ada54738c4068694cc19b0a6d70df07ac548c8738969ae9bd5"}
Jan 26 17:14:06 crc kubenswrapper[4865]: I0126 17:14:06.372321 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-7478f7dbf9-8cwj6"
Jan 26 17:14:06 crc kubenswrapper[4865]: I0126 17:14:06.372472 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-pk5pz"
Jan 26 17:14:06 crc kubenswrapper[4865]: I0126 17:14:06.373523 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-lb26q" event={"ID":"00f471fe-001b-40e6-91be-92a8f71bc951","Type":"ContainerStarted","Data":"3263379da3ce420558d42f7ef53093402aa4ee74b728c3a74b0fd83777186282"}
Jan 26 17:14:06 crc kubenswrapper[4865]: I0126 17:14:06.373879 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-lb26q"
Jan 26 17:14:06 crc kubenswrapper[4865]: I0126 17:14:06.380968 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-6f75f45d54-zb94x" event={"ID":"f7123b2b-df5a-4ae2-bfe6-f067fc318b5b","Type":"ContainerStarted","Data":"3a3d6203d6012b64893a73fcb2a0e7efbe4fc177a15366c9210dd12d86820490"}
Jan 26 17:14:06 crc kubenswrapper[4865]: I0126 17:14:06.381565 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-6f75f45d54-zb94x"
Jan 26 17:14:06 crc kubenswrapper[4865]: I0126 17:14:06.388075 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-598f7747c9-kblgz" podStartSLOduration=4.843200348 podStartE2EDuration="39.388056602s" podCreationTimestamp="2026-01-26 17:13:27 +0000 UTC" firstStartedPulling="2026-01-26 17:13:31.196911844 +0000 UTC m=+1138.780797431" lastFinishedPulling="2026-01-26 17:14:05.741768098 +0000 UTC m=+1173.325653685" observedRunningTime="2026-01-26 17:14:06.381811405 +0000 UTC m=+1173.965696992" watchObservedRunningTime="2026-01-26 17:14:06.388056602 +0000 UTC m=+1173.971942209"
Jan 26 17:14:06 crc kubenswrapper[4865]: I0126 17:14:06.395649 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-564965969-pksm4" event={"ID":"68710299-edf4-4c26-b035-a33e03f89d5f","Type":"ContainerStarted","Data":"07389113b0799169899fa0476fb7964c74d064b1680e70f66b94c3380e853c70"}
Jan 26 17:14:06 crc kubenswrapper[4865]: I0126 17:14:06.395959 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-564965969-pksm4"
Jan 26 17:14:06 crc kubenswrapper[4865]: I0126 17:14:06.408248 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-jdbmf" event={"ID":"9532dd1b-a2e9-4e25-b6ae-55f9d9e42859","Type":"ContainerStarted","Data":"f83a6b60a07e2d9d8a74fd55a96a7d615c4542cf719a5a869f4a1e0018e3f6b3"}
Jan 26 17:14:06 crc kubenswrapper[4865]: I0126 17:14:06.409087 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-jdbmf"
Jan 26 17:14:06 crc kubenswrapper[4865]: I0126 17:14:06.417169 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854hhdtm" event={"ID":"4444365f-5137-411d-ba00-25c9f9a7390f","Type":"ContainerStarted","Data":"b11d2378cfdd5015a85184a2c1ae4af6c9c1ab574765637ba3ff719dce97eb76"}
Jan 26 17:14:06 crc kubenswrapper[4865]: I0126 17:14:06.419317 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-9f7sb" podStartSLOduration=3.324753921 podStartE2EDuration="37.41929803s" podCreationTimestamp="2026-01-26 17:13:29 +0000 UTC" firstStartedPulling="2026-01-26 17:13:31.696312764 +0000 UTC m=+1139.280198351" lastFinishedPulling="2026-01-26 17:14:05.790856873 +0000 UTC m=+1173.374742460" observedRunningTime="2026-01-26 17:14:06.41753593 +0000 UTC m=+1174.001421537" watchObservedRunningTime="2026-01-26 17:14:06.41929803 +0000 UTC m=+1174.003183617"
Jan 26 17:14:06 crc kubenswrapper[4865]: I0126 17:14:06.424587 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-7fc674d489-dq2vn" event={"ID":"ab8ab562-a119-48c2-b431-abe25a789d74","Type":"ContainerStarted","Data":"e29db26b36f041c2988fcbc20f98feef79513e610a6185b40de060bc4b81bb54"}
Jan 26 17:14:06 crc kubenswrapper[4865]: I0126 17:14:06.432966 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-694cf4f878-6ljdm" event={"ID":"374004dc-56e6-4af1-9d53-0a14477e623f","Type":"ContainerStarted","Data":"16fc1c298ddec9246f2574e0c42ffe954b2875b630d7bef23ffbae4473e598f9"}
Jan 26 17:14:06 crc kubenswrapper[4865]: I0126 17:14:06.435700 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-l5z5s" event={"ID":"447fe2d4-cd26-4a3b-9cb0-5834991e70f4","Type":"ContainerStarted","Data":"ccb5aa0734bc166552c0acfa4b2197d2102233f0075b6e873caa61837a7ef8f5"}
Jan 26 17:14:06 crc kubenswrapper[4865]: I0126 17:14:06.436015 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-l5z5s"
Jan 26 17:14:06 crc kubenswrapper[4865]: I0126 17:14:06.450445 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7f86f8796f-bkj4l" event={"ID":"e4eb0e9f-c220-4e50-9a7e-89da7fe708d9","Type":"ContainerStarted","Data":"15e6797c0432e8707d1ba5364224181dcb94f4247a6fdb8c512ae705551bffac"}
Jan 26 17:14:06 crc kubenswrapper[4865]: I0126 17:14:06.451145 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7f86f8796f-bkj4l"
Jan 26 17:14:06 crc kubenswrapper[4865]: I0126 17:14:06.456332 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-6f75f45d54-zb94x" podStartSLOduration=5.460607582 podStartE2EDuration="39.456316052s" podCreationTimestamp="2026-01-26 17:13:27 +0000 UTC" firstStartedPulling="2026-01-26 17:13:31.696251193 +0000 UTC m=+1139.280136780" lastFinishedPulling="2026-01-26 17:14:05.691959663 +0000 UTC m=+1173.275845250" observedRunningTime="2026-01-26 17:14:06.450820996 +0000 UTC m=+1174.034706603" watchObservedRunningTime="2026-01-26 17:14:06.456316052 +0000 UTC m=+1174.040201639"
Jan 26 17:14:06 crc kubenswrapper[4865]: I0126 17:14:06.464231 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-jjnkn" event={"ID":"eababfca-3249-49df-b6d4-364e669e4b1e","Type":"ContainerStarted","Data":"7b8b368d5db06e359a8bac7127bcd824e9ce42f0555795c6bc61e2629905e2e9"}
Jan 26 17:14:06 crc kubenswrapper[4865]: I0126 17:14:06.464729 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-jjnkn"
Jan 26 17:14:06 crc kubenswrapper[4865]: I0126 17:14:06.483440 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-pk5pz" podStartSLOduration=5.522995314 podStartE2EDuration="39.483424092s" podCreationTimestamp="2026-01-26 17:13:27 +0000 UTC" firstStartedPulling="2026-01-26 17:13:31.702821749 +0000 UTC m=+1139.286707336" lastFinishedPulling="2026-01-26 17:14:05.663250527 +0000 UTC m=+1173.247136114" observedRunningTime="2026-01-26 17:14:06.481318612 +0000 UTC m=+1174.065204219" watchObservedRunningTime="2026-01-26 17:14:06.483424092 +0000 UTC m=+1174.067309669"
Jan 26 17:14:06 crc kubenswrapper[4865]: I0126 17:14:06.503861 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-gwrzm" podStartSLOduration=7.93285793 podStartE2EDuration="39.503839472s" podCreationTimestamp="2026-01-26 17:13:27 +0000 UTC" firstStartedPulling="2026-01-26 17:13:31.603634011 +0000 UTC m=+1139.187519588" lastFinishedPulling="2026-01-26 17:14:03.174615533 +0000 UTC m=+1170.758501130" observedRunningTime="2026-01-26 17:14:06.501455315 +0000 UTC m=+1174.085340902" watchObservedRunningTime="2026-01-26 17:14:06.503839472 +0000 UTC m=+1174.087725059"
Jan 26 17:14:06 crc kubenswrapper[4865]: I0126 17:14:06.586669 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-7478f7dbf9-8cwj6" podStartSLOduration=5.113426038 podStartE2EDuration="39.586630475s" podCreationTimestamp="2026-01-26 17:13:27 +0000 UTC" firstStartedPulling="2026-01-26 17:13:31.269621511 +0000 UTC m=+1138.853507098" lastFinishedPulling="2026-01-26 17:14:05.742825948 +0000 UTC m=+1173.326711535" observedRunningTime="2026-01-26 17:14:06.526220968 +0000 UTC m=+1174.110106575" watchObservedRunningTime="2026-01-26 17:14:06.586630475 +0000 UTC m=+1174.170516062"
Jan 26 17:14:06 crc kubenswrapper[4865]: I0126 17:14:06.588216 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-lb26q" podStartSLOduration=5.517716975 podStartE2EDuration="39.58820652s" podCreationTimestamp="2026-01-26 17:13:27 +0000 UTC" firstStartedPulling="2026-01-26 17:13:31.683410358 +0000 UTC m=+1139.267295945" lastFinishedPulling="2026-01-26 17:14:05.753899903 +0000 UTC m=+1173.337785490" observedRunningTime="2026-01-26 17:14:06.55233873 +0000 UTC m=+1174.136224307" watchObservedRunningTime="2026-01-26 17:14:06.58820652 +0000 UTC m=+1174.172092107"
Jan 26 17:14:06 crc kubenswrapper[4865]: I0126 17:14:06.615614 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-564965969-pksm4" podStartSLOduration=4.622532633 podStartE2EDuration="38.615588058s" podCreationTimestamp="2026-01-26 17:13:28 +0000 UTC" firstStartedPulling="2026-01-26 17:13:31.696660994 +0000 UTC m=+1139.280546581" lastFinishedPulling="2026-01-26 17:14:05.689716419 +0000 UTC m=+1173.273602006" observedRunningTime="2026-01-26 17:14:06.58926919 +0000 UTC m=+1174.173154777" watchObservedRunningTime="2026-01-26 17:14:06.615588058 +0000 UTC m=+1174.199473645"
Jan 26 17:14:06 crc kubenswrapper[4865]: I0126 17:14:06.637145 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-l5z5s" podStartSLOduration=5.669990373 podStartE2EDuration="39.63712249s" podCreationTimestamp="2026-01-26 17:13:27 +0000 UTC" firstStartedPulling="2026-01-26 17:13:30.612124838 +0000 UTC m=+1138.196010415" lastFinishedPulling="2026-01-26 17:14:04.579256945 +0000 UTC m=+1172.163142532" observedRunningTime="2026-01-26 17:14:06.635767041 +0000 UTC m=+1174.219652628" watchObservedRunningTime="2026-01-26 17:14:06.63712249 +0000 UTC m=+1174.221008077"
Jan 26 17:14:06 crc kubenswrapper[4865]: I0126 17:14:06.667369 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-jdbmf" podStartSLOduration=5.504830059 podStartE2EDuration="39.667349309s" podCreationTimestamp="2026-01-26 17:13:27 +0000 UTC" firstStartedPulling="2026-01-26 17:13:31.500812189 +0000 UTC m=+1139.084697776" lastFinishedPulling="2026-01-26 17:14:05.663331419 +0000 UTC m=+1173.247217026" observedRunningTime="2026-01-26 17:14:06.660535735 +0000 UTC m=+1174.244421322" watchObservedRunningTime="2026-01-26 17:14:06.667349309 +0000 UTC m=+1174.251234896"
Jan 26 17:14:06 crc kubenswrapper[4865]: I0126 17:14:06.703065 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-jjnkn" podStartSLOduration=7.836818932 podStartE2EDuration="39.703044563s" podCreationTimestamp="2026-01-26 17:13:27 +0000 UTC" firstStartedPulling="2026-01-26 17:13:30.148702509 +0000 UTC m=+1137.732588096" lastFinishedPulling="2026-01-26 17:14:02.0149281 +0000 UTC m=+1169.598813727" observedRunningTime="2026-01-26 17:14:06.696169078 +0000 UTC m=+1174.280054665" watchObservedRunningTime="2026-01-26 17:14:06.703044563 +0000 UTC m=+1174.286930170"
Jan 26 17:14:06 crc kubenswrapper[4865]: I0126 17:14:06.729351 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7f86f8796f-bkj4l" podStartSLOduration=9.019129898 podStartE2EDuration="39.72932734s" podCreationTimestamp="2026-01-26 17:13:27 +0000 UTC" firstStartedPulling="2026-01-26 17:13:30.601735722 +0000 UTC m=+1138.185621339" lastFinishedPulling="2026-01-26 17:14:01.311933194 +0000 UTC m=+1168.895818781" observedRunningTime="2026-01-26 17:14:06.723452573 +0000 UTC m=+1174.307338160" watchObservedRunningTime="2026-01-26 17:14:06.72932734 +0000 UTC m=+1174.313212937"
Jan 26 17:14:07 crc kubenswrapper[4865]: I0126 17:14:07.491474 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-7fc674d489-dq2vn" event={"ID":"ab8ab562-a119-48c2-b431-abe25a789d74","Type":"ContainerStarted","Data":"ca07505dabd13c47f3daa441d8beb9cb8bcaa9c70a2d4a5aff6eb16d59d5ab10"}
Jan 26 17:14:07 crc kubenswrapper[4865]: I0126 17:14:07.493868 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-7fc674d489-dq2vn"
Jan 26 17:14:07 crc kubenswrapper[4865]: I0126 17:14:07.545608 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-7fc674d489-dq2vn" podStartSLOduration=39.545567373 podStartE2EDuration="39.545567373s" podCreationTimestamp="2026-01-26 17:13:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 17:14:07.545007537 +0000 UTC m=+1175.128893124" watchObservedRunningTime="2026-01-26 17:14:07.545567373 +0000 UTC m=+1175.129452970"
Jan 26 17:14:10 crc kubenswrapper[4865]: I0126 17:14:10.518692 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-694cf4f878-6ljdm" event={"ID":"374004dc-56e6-4af1-9d53-0a14477e623f","Type":"ContainerStarted","Data":"6cb04ea6b9b249ba56908b32b2e9669cd71a0782466fccbfa8b25b2b22d6163e"}
Jan 26 17:14:10 crc kubenswrapper[4865]: I0126 17:14:10.519210 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-694cf4f878-6ljdm"
Jan 26 17:14:10 crc kubenswrapper[4865]: I0126 17:14:10.520237 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854hhdtm" event={"ID":"4444365f-5137-411d-ba00-25c9f9a7390f","Type":"ContainerStarted","Data":"f8ec178a99eb0b463327bd7e9c9e7b95a0619d19967366b46e768ba69e50fcc7"}
Jan 26 17:14:10 crc kubenswrapper[4865]: I0126 17:14:10.520388 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854hhdtm"
Jan 26 17:14:10 crc kubenswrapper[4865]: I0126 17:14:10.557961 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854hhdtm" podStartSLOduration=39.691168166 podStartE2EDuration="43.557942251s" podCreationTimestamp="2026-01-26 17:13:27 +0000 UTC" firstStartedPulling="2026-01-26 17:14:06.271649565 +0000 UTC m=+1173.855535152" lastFinishedPulling="2026-01-26 17:14:10.13842365 +0000 UTC m=+1177.722309237" observedRunningTime="2026-01-26 17:14:10.556393017 +0000 UTC m=+1178.140278604" watchObservedRunningTime="2026-01-26 17:14:10.557942251 +0000 UTC m=+1178.141827838"
Jan 26 17:14:10 crc kubenswrapper[4865]: I0126 17:14:10.563859 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-694cf4f878-6ljdm" podStartSLOduration=39.619081977 podStartE2EDuration="43.563838658s" podCreationTimestamp="2026-01-26 17:13:27 +0000 UTC" firstStartedPulling="2026-01-26 17:14:06.18985087 +0000 UTC m=+1173.773736467" lastFinishedPulling="2026-01-26 17:14:10.134607561 +0000 UTC m=+1177.718493148" observedRunningTime="2026-01-26 17:14:10.534756482 +0000 UTC m=+1178.118642079" watchObservedRunningTime="2026-01-26 17:14:10.563838658 +0000 UTC m=+1178.147724245"
Jan 26 17:14:11 crc kubenswrapper[4865]: I0126 17:14:11.507679 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-7fc674d489-dq2vn"
Jan 26 17:14:13 crc kubenswrapper[4865]: I0126 17:14:13.540660 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-t9v67" event={"ID":"796eb6f5-8010-4397-9328-de3b605107be","Type":"ContainerStarted","Data":"8bd6818b564449040fe37ea59232714d96377c7270a37ed3b2f9c9ee7a828138"}
Jan 26 17:14:13 crc kubenswrapper[4865]: I0126 17:14:13.541269 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-t9v67"
Jan 26 17:14:13 crc kubenswrapper[4865]: I0126 17:14:13.562684 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-t9v67" podStartSLOduration=5.259438067 podStartE2EDuration="46.56265919s" podCreationTimestamp="2026-01-26 17:13:27 +0000 UTC" firstStartedPulling="2026-01-26 17:13:31.22489799 +0000 UTC m=+1138.808783567" lastFinishedPulling="2026-01-26 17:14:12.528119103 +0000 UTC m=+1180.112004690" observedRunningTime="2026-01-26 17:14:13.557602487 +0000 UTC m=+1181.141488074" watchObservedRunningTime="2026-01-26 17:14:13.56265919 +0000 UTC m=+1181.146544777"
Jan 26 17:14:17 crc kubenswrapper[4865]: I0126 17:14:14.548336 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-6b9fb5fdcb-d2544" event={"ID":"40656d26-d2a5-4728-a67c-1880fb430675","Type":"ContainerStarted","Data":"56c8ec5a791103a1763ec7e583bde91eca0c26b70ea1f63dd8c4d666a203077f"}
Jan 26 17:14:17 crc kubenswrapper[4865]: I0126 17:14:14.548862 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-6b9fb5fdcb-d2544"
Jan 26 17:14:17 crc kubenswrapper[4865]: I0126 17:14:14.549934 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-xgdcd" event={"ID":"de67db31-b314-42ed-ac57-68f9fc5aab7c","Type":"ContainerStarted","Data":"599d9719b5f1ea2efec72c9d3bf45f7861bff9ba6f41460b6257f1b95baea5f2"}
Jan 26 17:14:17 crc kubenswrapper[4865]: I0126 17:14:14.550122 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-xgdcd"
Jan 26 17:14:17 crc kubenswrapper[4865]: I0126 17:14:14.578526 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-xgdcd" podStartSLOduration=5.156640854 podStartE2EDuration="47.578505765s" podCreationTimestamp="2026-01-26 17:13:27 +0000 UTC" firstStartedPulling="2026-01-26 17:13:31.169598198 +0000 UTC m=+1138.753483785" lastFinishedPulling="2026-01-26 17:14:13.591463109 +0000 UTC m=+1181.175348696" observedRunningTime="2026-01-26 17:14:14.575196841 +0000 UTC m=+1182.159082428" watchObservedRunningTime="2026-01-26 17:14:14.578505765 +0000 UTC m=+1182.162391352"
Jan 26 17:14:17 crc kubenswrapper[4865]: I0126 17:14:14.580500 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-6b9fb5fdcb-d2544" podStartSLOduration=5.495511063 podStartE2EDuration="47.580491311s" podCreationTimestamp="2026-01-26 17:13:27 +0000 UTC" firstStartedPulling="2026-01-26 17:13:31.510589117 +0000 UTC m=+1139.094474704" lastFinishedPulling="2026-01-26 17:14:13.595569365 +0000 UTC m=+1181.179454952" observedRunningTime="2026-01-26 17:14:14.563272462 +0000 UTC m=+1182.147158049" watchObservedRunningTime="2026-01-26 17:14:14.580491311 +0000 UTC m=+1182.164376898"
Jan 26 17:14:17 crc kubenswrapper[4865]: I0126 17:14:17.825799 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-7478f7dbf9-8cwj6"
Jan 26 17:14:17 crc kubenswrapper[4865]: I0126 17:14:17.826871 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7f86f8796f-bkj4l"
Jan 26 17:14:17 crc kubenswrapper[4865]: I0126 17:14:17.841052 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-b45d7bf98-l5z5s"
Jan 26 17:14:17 crc kubenswrapper[4865]: I0126 17:14:17.908707 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-78fdd796fd-jjnkn"
Jan 26 17:14:17 crc kubenswrapper[4865]: I0126 17:14:17.930534 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-t9v67"
Jan 26 17:14:18 crc kubenswrapper[4865]: I0126 17:14:18.039912 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-598f7747c9-kblgz"
Jan 26 17:14:18 crc kubenswrapper[4865]: I0126 17:14:18.205964 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-b8b6d4659-xgdcd"
Jan 26 17:14:18 crc kubenswrapper[4865]: I0126 17:14:18.238680 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-78c6999f6f-jdbmf"
Jan 26 17:14:18 crc kubenswrapper[4865]: I0126 17:14:18.253796 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-6b9fb5fdcb-d2544"
Jan 26 17:14:18 crc kubenswrapper[4865]: I0126 17:14:18.779883 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-6f75f45d54-zb94x"
Jan 26 17:14:18 crc kubenswrapper[4865]: I0126 17:14:18.854815 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-85cd9769bb-gwrzm"
Jan 26 17:14:18 crc kubenswrapper[4865]: I0126 17:14:18.925289 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-547cbdb99f-lb26q"
Jan 26 17:14:19 crc kubenswrapper[4865]: I0126 17:14:19.710317 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-69797bbcbd-pk5pz"
Jan 26 17:14:19 crc kubenswrapper[4865]: I0126 17:14:19.756062 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-694cf4f878-6ljdm"
Jan 26 17:14:19 crc kubenswrapper[4865]: I0126 17:14:19.813368 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-564965969-pksm4"
Jan 26 17:14:20 crc kubenswrapper[4865]: I0126 17:14:20.262622 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6b68b8b854hhdtm"
Jan 26 17:14:20 crc kubenswrapper[4865]: I0126 17:14:20.597982 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-7fff5bf694-l9xql" event={"ID":"6fe7b69f-8c3c-413a-b838-9abd3708a60c","Type":"ContainerStarted","Data":"9161147b06234b809fe199028ddbb2eef378759e1538df3e0c9ba0d0c976ee02"}
Jan 26 17:14:20 crc kubenswrapper[4865]: I0126 17:14:20.598227 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-7fff5bf694-l9xql"
Jan 26 17:14:20 crc kubenswrapper[4865]: I0126 17:14:20.599626 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-78d58447c5-psb52" event={"ID":"e65ea7be-88a0-4d65-ac76-b1956c034abd","Type":"ContainerStarted","Data":"3af5e5ad4f0aaa6c5b34233893b82f072e5d54712a8988d9b6a8f6d5c9bad281"}
Jan 26 17:14:20 crc kubenswrapper[4865]: I0126 17:14:20.599781 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-78d58447c5-psb52"
Jan 26 17:14:20 crc kubenswrapper[4865]: I0126 17:14:20.601117 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-c2786" event={"ID":"fb50fe13-a8bb-4115-85ab-677105257e40","Type":"ContainerStarted","Data":"9f2763585c46fe15030b9b6fc49a3ac89eface589553ac594cf8051f58fb42ef"}
Jan 26 17:14:20 crc kubenswrapper[4865]: I0126 17:14:20.601348 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-c2786"
Jan 26 17:14:20 crc kubenswrapper[4865]: I0126 17:14:20.602685 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-79d5ccc684-v2ff5" event={"ID":"c0022f58-7356-41ad-a2da-362c9f9bfd73","Type":"ContainerStarted","Data":"d4bbdfb85475b8e249851c9df4f2ec331f82fefa1141a717e4c26ac7061c8df5"}
Jan 26 17:14:20 crc kubenswrapper[4865]: I0126 17:14:20.603134 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-79d5ccc684-v2ff5"
Jan 26 17:14:20 crc kubenswrapper[4865]: I0126 17:14:20.605070 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-5f4cd88d46-7c4wx" event={"ID":"d0457508-04e0-4b3e-a84a-097a47ee346e","Type":"ContainerStarted","Data":"3940442d4d4d47bd08ee106e0ec979d0f5d3cf60285654df17e488252f517623"}
Jan 26 17:14:20 crc kubenswrapper[4865]: I0126 17:14:20.605787 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-5f4cd88d46-7c4wx"
Jan 26 17:14:20 crc kubenswrapper[4865]: I0126 17:14:20.616328 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-7fff5bf694-l9xql" podStartSLOduration=5.390633093 podStartE2EDuration="53.616304269s" podCreationTimestamp="2026-01-26 17:13:27 +0000 UTC" firstStartedPulling="2026-01-26 17:13:31.435549375 +0000 UTC m=+1139.019434972" lastFinishedPulling="2026-01-26 17:14:19.661220561 +0000 UTC m=+1187.245106148" observedRunningTime="2026-01-26 17:14:20.614456367 +0000 UTC m=+1188.198341954" watchObservedRunningTime="2026-01-26 17:14:20.616304269 +0000 UTC m=+1188.200189866"
Jan 26 17:14:20 crc kubenswrapper[4865]: I0126 17:14:20.637213 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-78d58447c5-psb52" podStartSLOduration=5.445893224 podStartE2EDuration="53.637189463s" podCreationTimestamp="2026-01-26 17:13:27 +0000 UTC" firstStartedPulling="2026-01-26 17:13:31.469936492 +0000 UTC m=+1139.053822079" lastFinishedPulling="2026-01-26 17:14:19.661232741 +0000 UTC m=+1187.245118318" observedRunningTime="2026-01-26 17:14:20.63286594 +0000 UTC m=+1188.216751527" watchObservedRunningTime="2026-01-26 17:14:20.637189463 +0000 UTC m=+1188.221075050"
Jan 26 17:14:20 crc kubenswrapper[4865]: I0126 17:14:20.658375 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-5f4cd88d46-7c4wx" podStartSLOduration=6.115041847 podStartE2EDuration="53.658350744s" podCreationTimestamp="2026-01-26 17:13:27 +0000 UTC" firstStartedPulling="2026-01-26 17:13:31.473084261 +0000 UTC m=+1139.056969848" lastFinishedPulling="2026-01-26 17:14:19.016393158 +0000 UTC m=+1186.600278745" observedRunningTime="2026-01-26 17:14:20.647171647 +0000 UTC m=+1188.231057234" watchObservedRunningTime="2026-01-26 17:14:20.658350744 +0000 UTC m=+1188.242236331"
Jan 26 17:14:20 crc kubenswrapper[4865]: I0126 17:14:20.686090 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-c2786" podStartSLOduration=5.86519619 podStartE2EDuration="53.686063522s" podCreationTimestamp="2026-01-26 17:13:27 +0000 UTC" firstStartedPulling="2026-01-26 17:13:31.197233144 +0000 UTC m=+1138.781118721" lastFinishedPulling="2026-01-26 17:14:19.018100466 +0000 UTC m=+1186.601986053" observedRunningTime="2026-01-26 17:14:20.671381014 +0000 UTC m=+1188.255266621" watchObservedRunningTime="2026-01-26 17:14:20.686063522 +0000 UTC m=+1188.269949109"
Jan 26 17:14:20 crc kubenswrapper[4865]: I0126 17:14:20.705456 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-79d5ccc684-v2ff5" podStartSLOduration=6.192146609 podStartE2EDuration="53.705438702s" podCreationTimestamp="2026-01-26 17:13:27 +0000 UTC" firstStartedPulling="2026-01-26 17:13:31.503437874 +0000 UTC m=+1139.087323461" lastFinishedPulling="2026-01-26 17:14:19.016729967 +0000 UTC m=+1186.600615554" observedRunningTime="2026-01-26 17:14:20.704448454 +0000 UTC m=+1188.288334051" watchObservedRunningTime="2026-01-26 17:14:20.705438702 +0000 UTC m=+1188.289324289"
Jan 26 17:14:27 crc kubenswrapper[4865]: I0126 17:14:27.889707 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-c2786"
Jan 26 17:14:28 crc kubenswrapper[4865]: I0126 17:14:28.284568 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-7fff5bf694-l9xql"
Jan 26 17:14:28 crc kubenswrapper[4865]: I0126 17:14:28.471710 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-79d5ccc684-v2ff5"
Jan 26 17:14:28 crc kubenswrapper[4865]: I0126 17:14:28.613862 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-5f4cd88d46-7c4wx"
Jan 26 17:14:28 crc kubenswrapper[4865]: I0126 17:14:28.817386 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-78d58447c5-psb52"
Jan 26 17:14:34 crc kubenswrapper[4865]: I0126 17:14:34.512321 4865 patch_prober.go:28] interesting pod/machine-config-daemon-q8cb9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 26 17:14:34 crc kubenswrapper[4865]: I0126 17:14:34.513521 4865 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.095489 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/rabbitmq-server-0"]
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.097822 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/rabbitmq-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.110626 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"nova-kuttl-default"/"rabbitmq-plugins-conf"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.110868 4865 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"rabbitmq-server-dockercfg-p5flf"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.110912 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"nova-kuttl-default"/"rabbitmq-server-conf"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.110940 4865 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"rabbitmq-default-user"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.110868 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"nova-kuttl-default"/"openshift-service-ca.crt"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.114185 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"nova-kuttl-default"/"kube-root-ca.crt"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.114349 4865 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"rabbitmq-erlang-cookie"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.145629 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/rabbitmq-server-0"]
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.245771 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/31ccc966-f964-468b-a364-0bb3360a4933-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"31ccc966-f964-468b-a364-0bb3360a4933\") " pod="nova-kuttl-default/rabbitmq-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.246118 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/31ccc966-f964-468b-a364-0bb3360a4933-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"31ccc966-f964-468b-a364-0bb3360a4933\") " pod="nova-kuttl-default/rabbitmq-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.246211 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/31ccc966-f964-468b-a364-0bb3360a4933-pod-info\") pod \"rabbitmq-server-0\" (UID: \"31ccc966-f964-468b-a364-0bb3360a4933\") " pod="nova-kuttl-default/rabbitmq-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.246301 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9m6g\" (UniqueName: \"kubernetes.io/projected/31ccc966-f964-468b-a364-0bb3360a4933-kube-api-access-w9m6g\") pod \"rabbitmq-server-0\" (UID: \"31ccc966-f964-468b-a364-0bb3360a4933\") " pod="nova-kuttl-default/rabbitmq-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.246427 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/31ccc966-f964-468b-a364-0bb3360a4933-server-conf\") pod \"rabbitmq-server-0\" (UID: \"31ccc966-f964-468b-a364-0bb3360a4933\") " pod="nova-kuttl-default/rabbitmq-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.246529 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-d14adb11-76aa-430c-84cb-18104d572fd4\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d14adb11-76aa-430c-84cb-18104d572fd4\") pod \"rabbitmq-server-0\" (UID: \"31ccc966-f964-468b-a364-0bb3360a4933\") " pod="nova-kuttl-default/rabbitmq-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.246617 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/31ccc966-f964-468b-a364-0bb3360a4933-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"31ccc966-f964-468b-a364-0bb3360a4933\") " pod="nova-kuttl-default/rabbitmq-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.246692 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/31ccc966-f964-468b-a364-0bb3360a4933-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"31ccc966-f964-468b-a364-0bb3360a4933\") " pod="nova-kuttl-default/rabbitmq-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.246767 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/31ccc966-f964-468b-a364-0bb3360a4933-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"31ccc966-f964-468b-a364-0bb3360a4933\") " pod="nova-kuttl-default/rabbitmq-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.313362 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/rabbitmq-broadcaster-server-0"]
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.314865 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/rabbitmq-broadcaster-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.319389 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"nova-kuttl-default"/"rabbitmq-broadcaster-plugins-conf"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.324535 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"nova-kuttl-default"/"rabbitmq-broadcaster-server-conf"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.324745 4865 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"rabbitmq-broadcaster-server-dockercfg-bwn8j"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.324894 4865 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"rabbitmq-broadcaster-erlang-cookie"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.334764 4865 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"rabbitmq-broadcaster-default-user"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.341882 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/rabbitmq-broadcaster-server-0"]
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.348802 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/31ccc966-f964-468b-a364-0bb3360a4933-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"31ccc966-f964-468b-a364-0bb3360a4933\") " pod="nova-kuttl-default/rabbitmq-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.348850 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/31ccc966-f964-468b-a364-0bb3360a4933-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"31ccc966-f964-468b-a364-0bb3360a4933\") " pod="nova-kuttl-default/rabbitmq-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.348877 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/31ccc966-f964-468b-a364-0bb3360a4933-pod-info\") pod \"rabbitmq-server-0\" (UID: \"31ccc966-f964-468b-a364-0bb3360a4933\") " pod="nova-kuttl-default/rabbitmq-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.348894 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9m6g\" (UniqueName: \"kubernetes.io/projected/31ccc966-f964-468b-a364-0bb3360a4933-kube-api-access-w9m6g\") pod \"rabbitmq-server-0\" (UID: \"31ccc966-f964-468b-a364-0bb3360a4933\") " pod="nova-kuttl-default/rabbitmq-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.348925 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/31ccc966-f964-468b-a364-0bb3360a4933-server-conf\") pod \"rabbitmq-server-0\" (UID: \"31ccc966-f964-468b-a364-0bb3360a4933\") " pod="nova-kuttl-default/rabbitmq-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.348960 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-d14adb11-76aa-430c-84cb-18104d572fd4\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d14adb11-76aa-430c-84cb-18104d572fd4\") pod \"rabbitmq-server-0\" (UID: \"31ccc966-f964-468b-a364-0bb3360a4933\") " pod="nova-kuttl-default/rabbitmq-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.348985 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/31ccc966-f964-468b-a364-0bb3360a4933-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"31ccc966-f964-468b-a364-0bb3360a4933\") " pod="nova-kuttl-default/rabbitmq-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.349022 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/31ccc966-f964-468b-a364-0bb3360a4933-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"31ccc966-f964-468b-a364-0bb3360a4933\") " pod="nova-kuttl-default/rabbitmq-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.349053 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/31ccc966-f964-468b-a364-0bb3360a4933-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"31ccc966-f964-468b-a364-0bb3360a4933\") " pod="nova-kuttl-default/rabbitmq-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.360790 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/31ccc966-f964-468b-a364-0bb3360a4933-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"31ccc966-f964-468b-a364-0bb3360a4933\") " pod="nova-kuttl-default/rabbitmq-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.366362 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/31ccc966-f964-468b-a364-0bb3360a4933-server-conf\") pod \"rabbitmq-server-0\" (UID: \"31ccc966-f964-468b-a364-0bb3360a4933\") " pod="nova-kuttl-default/rabbitmq-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.374651 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/31ccc966-f964-468b-a364-0bb3360a4933-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"31ccc966-f964-468b-a364-0bb3360a4933\") " pod="nova-kuttl-default/rabbitmq-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.374893 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/31ccc966-f964-468b-a364-0bb3360a4933-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"31ccc966-f964-468b-a364-0bb3360a4933\") " pod="nova-kuttl-default/rabbitmq-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.410637 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/31ccc966-f964-468b-a364-0bb3360a4933-pod-info\") pod \"rabbitmq-server-0\" (UID: \"31ccc966-f964-468b-a364-0bb3360a4933\") " pod="nova-kuttl-default/rabbitmq-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.421710 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/31ccc966-f964-468b-a364-0bb3360a4933-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"31ccc966-f964-468b-a364-0bb3360a4933\") " pod="nova-kuttl-default/rabbitmq-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.423199 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/31ccc966-f964-468b-a364-0bb3360a4933-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"31ccc966-f964-468b-a364-0bb3360a4933\") " pod="nova-kuttl-default/rabbitmq-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.427347 4865 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.436013 4865 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-d14adb11-76aa-430c-84cb-18104d572fd4\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d14adb11-76aa-430c-84cb-18104d572fd4\") pod \"rabbitmq-server-0\" (UID: \"31ccc966-f964-468b-a364-0bb3360a4933\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1de9eb7dc8df00458d780a7a3e2f61334ec3555f6669813b4e0cb24e5cfb76f2/globalmount\"" pod="nova-kuttl-default/rabbitmq-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.432802 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9m6g\" (UniqueName: \"kubernetes.io/projected/31ccc966-f964-468b-a364-0bb3360a4933-kube-api-access-w9m6g\") pod \"rabbitmq-server-0\" (UID: \"31ccc966-f964-468b-a364-0bb3360a4933\") " pod="nova-kuttl-default/rabbitmq-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.455842 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-9d5a3506-87c0-41b0-8da2-b0b6897d58be\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9d5a3506-87c0-41b0-8da2-b0b6897d58be\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"13ed1e3f-65dc-47e3-a8bc-c33f37660f23\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.455927 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/13ed1e3f-65dc-47e3-a8bc-c33f37660f23-erlang-cookie-secret\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"13ed1e3f-65dc-47e3-a8bc-c33f37660f23\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.455978 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/13ed1e3f-65dc-47e3-a8bc-c33f37660f23-rabbitmq-confd\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"13ed1e3f-65dc-47e3-a8bc-c33f37660f23\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.456082 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/13ed1e3f-65dc-47e3-a8bc-c33f37660f23-plugins-conf\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"13ed1e3f-65dc-47e3-a8bc-c33f37660f23\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.456161 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f64cg\" (UniqueName: \"kubernetes.io/projected/13ed1e3f-65dc-47e3-a8bc-c33f37660f23-kube-api-access-f64cg\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"13ed1e3f-65dc-47e3-a8bc-c33f37660f23\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.456195 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/13ed1e3f-65dc-47e3-a8bc-c33f37660f23-rabbitmq-erlang-cookie\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"13ed1e3f-65dc-47e3-a8bc-c33f37660f23\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.456231 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/13ed1e3f-65dc-47e3-a8bc-c33f37660f23-rabbitmq-plugins\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"13ed1e3f-65dc-47e3-a8bc-c33f37660f23\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.456288 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/13ed1e3f-65dc-47e3-a8bc-c33f37660f23-server-conf\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"13ed1e3f-65dc-47e3-a8bc-c33f37660f23\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.456324 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/13ed1e3f-65dc-47e3-a8bc-c33f37660f23-pod-info\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"13ed1e3f-65dc-47e3-a8bc-c33f37660f23\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.503706 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-d14adb11-76aa-430c-84cb-18104d572fd4\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d14adb11-76aa-430c-84cb-18104d572fd4\") pod \"rabbitmq-server-0\" (UID: \"31ccc966-f964-468b-a364-0bb3360a4933\") " pod="nova-kuttl-default/rabbitmq-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.557122 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/rabbitmq-cell1-server-0"]
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.557330 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/13ed1e3f-65dc-47e3-a8bc-c33f37660f23-pod-info\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"13ed1e3f-65dc-47e3-a8bc-c33f37660f23\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.557398 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-9d5a3506-87c0-41b0-8da2-b0b6897d58be\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9d5a3506-87c0-41b0-8da2-b0b6897d58be\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"13ed1e3f-65dc-47e3-a8bc-c33f37660f23\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.557429 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/13ed1e3f-65dc-47e3-a8bc-c33f37660f23-erlang-cookie-secret\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"13ed1e3f-65dc-47e3-a8bc-c33f37660f23\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.557449 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/13ed1e3f-65dc-47e3-a8bc-c33f37660f23-rabbitmq-confd\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"13ed1e3f-65dc-47e3-a8bc-c33f37660f23\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.557483 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/13ed1e3f-65dc-47e3-a8bc-c33f37660f23-plugins-conf\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"13ed1e3f-65dc-47e3-a8bc-c33f37660f23\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.557532 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f64cg\" (UniqueName: \"kubernetes.io/projected/13ed1e3f-65dc-47e3-a8bc-c33f37660f23-kube-api-access-f64cg\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"13ed1e3f-65dc-47e3-a8bc-c33f37660f23\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.557560 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/13ed1e3f-65dc-47e3-a8bc-c33f37660f23-rabbitmq-erlang-cookie\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"13ed1e3f-65dc-47e3-a8bc-c33f37660f23\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.557591 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/13ed1e3f-65dc-47e3-a8bc-c33f37660f23-rabbitmq-plugins\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"13ed1e3f-65dc-47e3-a8bc-c33f37660f23\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.557630 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/13ed1e3f-65dc-47e3-a8bc-c33f37660f23-server-conf\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"13ed1e3f-65dc-47e3-a8bc-c33f37660f23\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.558728 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/13ed1e3f-65dc-47e3-a8bc-c33f37660f23-rabbitmq-erlang-cookie\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"13ed1e3f-65dc-47e3-a8bc-c33f37660f23\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.559385 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/13ed1e3f-65dc-47e3-a8bc-c33f37660f23-rabbitmq-plugins\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"13ed1e3f-65dc-47e3-a8bc-c33f37660f23\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.559706 4865 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.559737 4865 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-9d5a3506-87c0-41b0-8da2-b0b6897d58be\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9d5a3506-87c0-41b0-8da2-b0b6897d58be\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"13ed1e3f-65dc-47e3-a8bc-c33f37660f23\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/71fb855d1637cb59b57ef52a214d28f061816bf0415f64441117907059cbc926/globalmount\"" pod="nova-kuttl-default/rabbitmq-broadcaster-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.561298 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/13ed1e3f-65dc-47e3-a8bc-c33f37660f23-server-conf\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"13ed1e3f-65dc-47e3-a8bc-c33f37660f23\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.561659 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/13ed1e3f-65dc-47e3-a8bc-c33f37660f23-plugins-conf\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"13ed1e3f-65dc-47e3-a8bc-c33f37660f23\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.561944 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/rabbitmq-cell1-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.561967 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/13ed1e3f-65dc-47e3-a8bc-c33f37660f23-erlang-cookie-secret\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"13ed1e3f-65dc-47e3-a8bc-c33f37660f23\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.564111 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/13ed1e3f-65dc-47e3-a8bc-c33f37660f23-pod-info\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"13ed1e3f-65dc-47e3-a8bc-c33f37660f23\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.565322 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"nova-kuttl-default"/"rabbitmq-cell1-plugins-conf"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.565420 4865 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"rabbitmq-cell1-default-user"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.565605 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"nova-kuttl-default"/"rabbitmq-cell1-server-conf"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.565668 4865 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"rabbitmq-cell1-erlang-cookie"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.565800 4865 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"rabbitmq-cell1-server-dockercfg-2qd6z"
Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.572040 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/13ed1e3f-65dc-47e3-a8bc-c33f37660f23-rabbitmq-confd\") pod 
\"rabbitmq-broadcaster-server-0\" (UID: \"13ed1e3f-65dc-47e3-a8bc-c33f37660f23\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.586439 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f64cg\" (UniqueName: \"kubernetes.io/projected/13ed1e3f-65dc-47e3-a8bc-c33f37660f23-kube-api-access-f64cg\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"13ed1e3f-65dc-47e3-a8bc-c33f37660f23\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.615665 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-9d5a3506-87c0-41b0-8da2-b0b6897d58be\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9d5a3506-87c0-41b0-8da2-b0b6897d58be\") pod \"rabbitmq-broadcaster-server-0\" (UID: \"13ed1e3f-65dc-47e3-a8bc-c33f37660f23\") " pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.618891 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/rabbitmq-cell1-server-0"] Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.659355 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qkm4r\" (UniqueName: \"kubernetes.io/projected/0b48bb4a-936f-491b-b54a-a66bfcac547d-kube-api-access-qkm4r\") pod \"rabbitmq-cell1-server-0\" (UID: \"0b48bb4a-936f-491b-b54a-a66bfcac547d\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.659414 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0b48bb4a-936f-491b-b54a-a66bfcac547d-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"0b48bb4a-936f-491b-b54a-a66bfcac547d\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.659443 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0b48bb4a-936f-491b-b54a-a66bfcac547d-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"0b48bb4a-936f-491b-b54a-a66bfcac547d\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.659479 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0b48bb4a-936f-491b-b54a-a66bfcac547d-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"0b48bb4a-936f-491b-b54a-a66bfcac547d\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.659513 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0b48bb4a-936f-491b-b54a-a66bfcac547d-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"0b48bb4a-936f-491b-b54a-a66bfcac547d\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.659547 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-40f3bdd8-1cce-4ab1-8331-ac444b1dbe0d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-40f3bdd8-1cce-4ab1-8331-ac444b1dbe0d\") pod \"rabbitmq-cell1-server-0\" 
(UID: \"0b48bb4a-936f-491b-b54a-a66bfcac547d\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.659577 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0b48bb4a-936f-491b-b54a-a66bfcac547d-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"0b48bb4a-936f-491b-b54a-a66bfcac547d\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.659593 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0b48bb4a-936f-491b-b54a-a66bfcac547d-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"0b48bb4a-936f-491b-b54a-a66bfcac547d\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.659612 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0b48bb4a-936f-491b-b54a-a66bfcac547d-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"0b48bb4a-936f-491b-b54a-a66bfcac547d\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.661715 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.728866 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/rabbitmq-server-0" Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.761250 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-40f3bdd8-1cce-4ab1-8331-ac444b1dbe0d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-40f3bdd8-1cce-4ab1-8331-ac444b1dbe0d\") pod \"rabbitmq-cell1-server-0\" (UID: \"0b48bb4a-936f-491b-b54a-a66bfcac547d\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.761316 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0b48bb4a-936f-491b-b54a-a66bfcac547d-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"0b48bb4a-936f-491b-b54a-a66bfcac547d\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.761340 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0b48bb4a-936f-491b-b54a-a66bfcac547d-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"0b48bb4a-936f-491b-b54a-a66bfcac547d\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.761359 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0b48bb4a-936f-491b-b54a-a66bfcac547d-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"0b48bb4a-936f-491b-b54a-a66bfcac547d\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.761396 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qkm4r\" (UniqueName: 
\"kubernetes.io/projected/0b48bb4a-936f-491b-b54a-a66bfcac547d-kube-api-access-qkm4r\") pod \"rabbitmq-cell1-server-0\" (UID: \"0b48bb4a-936f-491b-b54a-a66bfcac547d\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.761417 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0b48bb4a-936f-491b-b54a-a66bfcac547d-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"0b48bb4a-936f-491b-b54a-a66bfcac547d\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.761438 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0b48bb4a-936f-491b-b54a-a66bfcac547d-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"0b48bb4a-936f-491b-b54a-a66bfcac547d\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.761481 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0b48bb4a-936f-491b-b54a-a66bfcac547d-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"0b48bb4a-936f-491b-b54a-a66bfcac547d\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.761511 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0b48bb4a-936f-491b-b54a-a66bfcac547d-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"0b48bb4a-936f-491b-b54a-a66bfcac547d\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.761919 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0b48bb4a-936f-491b-b54a-a66bfcac547d-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"0b48bb4a-936f-491b-b54a-a66bfcac547d\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.762313 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0b48bb4a-936f-491b-b54a-a66bfcac547d-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"0b48bb4a-936f-491b-b54a-a66bfcac547d\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.762412 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0b48bb4a-936f-491b-b54a-a66bfcac547d-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"0b48bb4a-936f-491b-b54a-a66bfcac547d\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.763853 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0b48bb4a-936f-491b-b54a-a66bfcac547d-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"0b48bb4a-936f-491b-b54a-a66bfcac547d\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.770059 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: 
\"kubernetes.io/projected/0b48bb4a-936f-491b-b54a-a66bfcac547d-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"0b48bb4a-936f-491b-b54a-a66bfcac547d\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.770322 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0b48bb4a-936f-491b-b54a-a66bfcac547d-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"0b48bb4a-936f-491b-b54a-a66bfcac547d\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.770393 4865 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.770429 4865 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-40f3bdd8-1cce-4ab1-8331-ac444b1dbe0d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-40f3bdd8-1cce-4ab1-8331-ac444b1dbe0d\") pod \"rabbitmq-cell1-server-0\" (UID: \"0b48bb4a-936f-491b-b54a-a66bfcac547d\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/21425cab0efe99a82a0811e0fee152f7407e92e7962bbf6af8f39e7cfe46a6f1/globalmount\"" pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.771022 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0b48bb4a-936f-491b-b54a-a66bfcac547d-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"0b48bb4a-936f-491b-b54a-a66bfcac547d\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.790162 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qkm4r\" (UniqueName: \"kubernetes.io/projected/0b48bb4a-936f-491b-b54a-a66bfcac547d-kube-api-access-qkm4r\") pod \"rabbitmq-cell1-server-0\" (UID: \"0b48bb4a-936f-491b-b54a-a66bfcac547d\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.822912 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-40f3bdd8-1cce-4ab1-8331-ac444b1dbe0d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-40f3bdd8-1cce-4ab1-8331-ac444b1dbe0d\") pod \"rabbitmq-cell1-server-0\" (UID: \"0b48bb4a-936f-491b-b54a-a66bfcac547d\") " pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 26 17:14:41 crc kubenswrapper[4865]: I0126 17:14:41.928755 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/rabbitmq-cell1-server-0" Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.000472 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/rabbitmq-broadcaster-server-0"] Jan 26 17:14:42 crc kubenswrapper[4865]: W0126 17:14:42.016981 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod13ed1e3f_65dc_47e3_a8bc_c33f37660f23.slice/crio-131c296989163c45e3630cfe309f68059fb2113e9c14a17bc7bd30710a124034 WatchSource:0}: Error finding container 131c296989163c45e3630cfe309f68059fb2113e9c14a17bc7bd30710a124034: Status 404 returned error can't find the container with id 131c296989163c45e3630cfe309f68059fb2113e9c14a17bc7bd30710a124034 Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.070863 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/rabbitmq-server-0"] Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.257853 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/rabbitmq-server-0" event={"ID":"31ccc966-f964-468b-a364-0bb3360a4933","Type":"ContainerStarted","Data":"2a3762f767717bed08a8fbf81dd10c83a879a1a4819c52ba7ee8696ba2aee3db"} Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.260696 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" event={"ID":"13ed1e3f-65dc-47e3-a8bc-c33f37660f23","Type":"ContainerStarted","Data":"131c296989163c45e3630cfe309f68059fb2113e9c14a17bc7bd30710a124034"} Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.269183 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/openstack-galera-0"] Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.271599 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/openstack-galera-0" Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.277852 4865 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"cert-galera-openstack-svc" Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.278308 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"nova-kuttl-default"/"openstack-config-data" Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.279487 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"nova-kuttl-default"/"openstack-scripts" Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.282506 4865 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"galera-openstack-dockercfg-jx8p5" Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.290421 4865 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"combined-ca-bundle" Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.291050 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/openstack-galera-0"] Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.375365 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2ae87188-ebdc-44a7-8504-5f61c2e4ea9a-operator-scripts\") pod \"openstack-galera-0\" (UID: \"2ae87188-ebdc-44a7-8504-5f61c2e4ea9a\") " pod="nova-kuttl-default/openstack-galera-0" Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.375410 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/2ae87188-ebdc-44a7-8504-5f61c2e4ea9a-config-data-default\") pod \"openstack-galera-0\" (UID: \"2ae87188-ebdc-44a7-8504-5f61c2e4ea9a\") " pod="nova-kuttl-default/openstack-galera-0" Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.375434 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ae87188-ebdc-44a7-8504-5f61c2e4ea9a-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"2ae87188-ebdc-44a7-8504-5f61c2e4ea9a\") " pod="nova-kuttl-default/openstack-galera-0" Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.375469 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/2ae87188-ebdc-44a7-8504-5f61c2e4ea9a-config-data-generated\") pod \"openstack-galera-0\" (UID: \"2ae87188-ebdc-44a7-8504-5f61c2e4ea9a\") " pod="nova-kuttl-default/openstack-galera-0" Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.375558 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-59s79\" (UniqueName: \"kubernetes.io/projected/2ae87188-ebdc-44a7-8504-5f61c2e4ea9a-kube-api-access-59s79\") pod \"openstack-galera-0\" (UID: \"2ae87188-ebdc-44a7-8504-5f61c2e4ea9a\") " pod="nova-kuttl-default/openstack-galera-0" Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.375579 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-dcb5414b-3b0e-43fb-b1e7-0940f39708ce\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dcb5414b-3b0e-43fb-b1e7-0940f39708ce\") pod \"openstack-galera-0\" (UID: \"2ae87188-ebdc-44a7-8504-5f61c2e4ea9a\") " 
pod="nova-kuttl-default/openstack-galera-0" Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.375658 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ae87188-ebdc-44a7-8504-5f61c2e4ea9a-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"2ae87188-ebdc-44a7-8504-5f61c2e4ea9a\") " pod="nova-kuttl-default/openstack-galera-0" Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.375694 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2ae87188-ebdc-44a7-8504-5f61c2e4ea9a-kolla-config\") pod \"openstack-galera-0\" (UID: \"2ae87188-ebdc-44a7-8504-5f61c2e4ea9a\") " pod="nova-kuttl-default/openstack-galera-0" Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.469350 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/memcached-0"] Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.470969 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/memcached-0" Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.474769 4865 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"memcached-memcached-dockercfg-wtg2q" Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.481481 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"nova-kuttl-default"/"memcached-config-data" Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.482402 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2ae87188-ebdc-44a7-8504-5f61c2e4ea9a-kolla-config\") pod \"openstack-galera-0\" (UID: \"2ae87188-ebdc-44a7-8504-5f61c2e4ea9a\") " pod="nova-kuttl-default/openstack-galera-0" Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.482484 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2ae87188-ebdc-44a7-8504-5f61c2e4ea9a-operator-scripts\") pod \"openstack-galera-0\" (UID: \"2ae87188-ebdc-44a7-8504-5f61c2e4ea9a\") " pod="nova-kuttl-default/openstack-galera-0" Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.482509 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/2ae87188-ebdc-44a7-8504-5f61c2e4ea9a-config-data-default\") pod \"openstack-galera-0\" (UID: \"2ae87188-ebdc-44a7-8504-5f61c2e4ea9a\") " pod="nova-kuttl-default/openstack-galera-0" Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.482540 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ae87188-ebdc-44a7-8504-5f61c2e4ea9a-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"2ae87188-ebdc-44a7-8504-5f61c2e4ea9a\") " pod="nova-kuttl-default/openstack-galera-0" Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.482572 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/2ae87188-ebdc-44a7-8504-5f61c2e4ea9a-config-data-generated\") pod \"openstack-galera-0\" (UID: \"2ae87188-ebdc-44a7-8504-5f61c2e4ea9a\") " pod="nova-kuttl-default/openstack-galera-0" Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.482632 4865 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-59s79\" (UniqueName: \"kubernetes.io/projected/2ae87188-ebdc-44a7-8504-5f61c2e4ea9a-kube-api-access-59s79\") pod \"openstack-galera-0\" (UID: \"2ae87188-ebdc-44a7-8504-5f61c2e4ea9a\") " pod="nova-kuttl-default/openstack-galera-0" Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.482664 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-dcb5414b-3b0e-43fb-b1e7-0940f39708ce\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dcb5414b-3b0e-43fb-b1e7-0940f39708ce\") pod \"openstack-galera-0\" (UID: \"2ae87188-ebdc-44a7-8504-5f61c2e4ea9a\") " pod="nova-kuttl-default/openstack-galera-0" Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.482725 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ae87188-ebdc-44a7-8504-5f61c2e4ea9a-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"2ae87188-ebdc-44a7-8504-5f61c2e4ea9a\") " pod="nova-kuttl-default/openstack-galera-0" Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.484260 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/2ae87188-ebdc-44a7-8504-5f61c2e4ea9a-config-data-default\") pod \"openstack-galera-0\" (UID: \"2ae87188-ebdc-44a7-8504-5f61c2e4ea9a\") " pod="nova-kuttl-default/openstack-galera-0" Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.484331 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/2ae87188-ebdc-44a7-8504-5f61c2e4ea9a-config-data-generated\") pod \"openstack-galera-0\" (UID: \"2ae87188-ebdc-44a7-8504-5f61c2e4ea9a\") " pod="nova-kuttl-default/openstack-galera-0" Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.485187 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/2ae87188-ebdc-44a7-8504-5f61c2e4ea9a-kolla-config\") pod \"openstack-galera-0\" (UID: \"2ae87188-ebdc-44a7-8504-5f61c2e4ea9a\") " pod="nova-kuttl-default/openstack-galera-0" Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.486422 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2ae87188-ebdc-44a7-8504-5f61c2e4ea9a-operator-scripts\") pod \"openstack-galera-0\" (UID: \"2ae87188-ebdc-44a7-8504-5f61c2e4ea9a\") " pod="nova-kuttl-default/openstack-galera-0" Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.487910 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/rabbitmq-cell1-server-0"] Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.497312 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ae87188-ebdc-44a7-8504-5f61c2e4ea9a-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"2ae87188-ebdc-44a7-8504-5f61c2e4ea9a\") " pod="nova-kuttl-default/openstack-galera-0" Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.507722 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ae87188-ebdc-44a7-8504-5f61c2e4ea9a-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"2ae87188-ebdc-44a7-8504-5f61c2e4ea9a\") " pod="nova-kuttl-default/openstack-galera-0" Jan 26 17:14:42 crc 
kubenswrapper[4865]: I0126 17:14:42.509015 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/memcached-0"] Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.520175 4865 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.520232 4865 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-dcb5414b-3b0e-43fb-b1e7-0940f39708ce\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dcb5414b-3b0e-43fb-b1e7-0940f39708ce\") pod \"openstack-galera-0\" (UID: \"2ae87188-ebdc-44a7-8504-5f61c2e4ea9a\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/11b821c412321f6f03270174454c69ab868c9fa6e8f4e3e45a4e8b8385c14f9b/globalmount\"" pod="nova-kuttl-default/openstack-galera-0" Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.551419 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-59s79\" (UniqueName: \"kubernetes.io/projected/2ae87188-ebdc-44a7-8504-5f61c2e4ea9a-kube-api-access-59s79\") pod \"openstack-galera-0\" (UID: \"2ae87188-ebdc-44a7-8504-5f61c2e4ea9a\") " pod="nova-kuttl-default/openstack-galera-0" Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.586531 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/409c7131-e3b9-4bbc-a7db-51f9c150e354-kolla-config\") pod \"memcached-0\" (UID: \"409c7131-e3b9-4bbc-a7db-51f9c150e354\") " pod="nova-kuttl-default/memcached-0" Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.586661 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/409c7131-e3b9-4bbc-a7db-51f9c150e354-config-data\") pod \"memcached-0\" (UID: \"409c7131-e3b9-4bbc-a7db-51f9c150e354\") " pod="nova-kuttl-default/memcached-0" Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.586772 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s4j2z\" (UniqueName: \"kubernetes.io/projected/409c7131-e3b9-4bbc-a7db-51f9c150e354-kube-api-access-s4j2z\") pod \"memcached-0\" (UID: \"409c7131-e3b9-4bbc-a7db-51f9c150e354\") " pod="nova-kuttl-default/memcached-0" Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.591021 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-dcb5414b-3b0e-43fb-b1e7-0940f39708ce\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-dcb5414b-3b0e-43fb-b1e7-0940f39708ce\") pod \"openstack-galera-0\" (UID: \"2ae87188-ebdc-44a7-8504-5f61c2e4ea9a\") " pod="nova-kuttl-default/openstack-galera-0" Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.598900 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/openstack-galera-0" Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.688675 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s4j2z\" (UniqueName: \"kubernetes.io/projected/409c7131-e3b9-4bbc-a7db-51f9c150e354-kube-api-access-s4j2z\") pod \"memcached-0\" (UID: \"409c7131-e3b9-4bbc-a7db-51f9c150e354\") " pod="nova-kuttl-default/memcached-0" Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.688960 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/409c7131-e3b9-4bbc-a7db-51f9c150e354-kolla-config\") pod \"memcached-0\" (UID: \"409c7131-e3b9-4bbc-a7db-51f9c150e354\") " pod="nova-kuttl-default/memcached-0" Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.689218 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/409c7131-e3b9-4bbc-a7db-51f9c150e354-config-data\") pod \"memcached-0\" (UID: \"409c7131-e3b9-4bbc-a7db-51f9c150e354\") " pod="nova-kuttl-default/memcached-0" Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.690098 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/409c7131-e3b9-4bbc-a7db-51f9c150e354-kolla-config\") pod \"memcached-0\" (UID: \"409c7131-e3b9-4bbc-a7db-51f9c150e354\") " pod="nova-kuttl-default/memcached-0" Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.690255 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/409c7131-e3b9-4bbc-a7db-51f9c150e354-config-data\") pod \"memcached-0\" (UID: \"409c7131-e3b9-4bbc-a7db-51f9c150e354\") " pod="nova-kuttl-default/memcached-0" Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.708354 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s4j2z\" (UniqueName: \"kubernetes.io/projected/409c7131-e3b9-4bbc-a7db-51f9c150e354-kube-api-access-s4j2z\") pod \"memcached-0\" (UID: \"409c7131-e3b9-4bbc-a7db-51f9c150e354\") " pod="nova-kuttl-default/memcached-0" Jan 26 17:14:42 crc kubenswrapper[4865]: I0126 17:14:42.906280 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/memcached-0" Jan 26 17:14:43 crc kubenswrapper[4865]: I0126 17:14:43.101327 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/openstack-galera-0"] Jan 26 17:14:43 crc kubenswrapper[4865]: I0126 17:14:43.288271 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/openstack-galera-0" event={"ID":"2ae87188-ebdc-44a7-8504-5f61c2e4ea9a","Type":"ContainerStarted","Data":"3a17536082758f2d304f80f01f98a507de6cefb9dd06f1c404f9114203bd9a7d"} Jan 26 17:14:43 crc kubenswrapper[4865]: I0126 17:14:43.289914 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/rabbitmq-cell1-server-0" event={"ID":"0b48bb4a-936f-491b-b54a-a66bfcac547d","Type":"ContainerStarted","Data":"cc2d134d35a19bf274df8835f3319883465e5d47a5333060d3fbe7bbf4c3a65b"} Jan 26 17:14:43 crc kubenswrapper[4865]: I0126 17:14:43.361780 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/memcached-0"] Jan 26 17:14:43 crc kubenswrapper[4865]: W0126 17:14:43.372383 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod409c7131_e3b9_4bbc_a7db_51f9c150e354.slice/crio-a153712aa9a4485a46afd7509a90161dd7b84ac765f35dbedd09fb447daac3ff WatchSource:0}: Error finding container a153712aa9a4485a46afd7509a90161dd7b84ac765f35dbedd09fb447daac3ff: Status 404 returned error can't find the container with id a153712aa9a4485a46afd7509a90161dd7b84ac765f35dbedd09fb447daac3ff Jan 26 17:14:43 crc kubenswrapper[4865]: I0126 17:14:43.656468 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/openstack-cell1-galera-0"] Jan 26 17:14:43 crc kubenswrapper[4865]: I0126 17:14:43.658128 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 26 17:14:43 crc kubenswrapper[4865]: I0126 17:14:43.664035 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"nova-kuttl-default"/"openstack-cell1-config-data" Jan 26 17:14:43 crc kubenswrapper[4865]: I0126 17:14:43.664216 4865 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"cert-galera-openstack-cell1-svc" Jan 26 17:14:43 crc kubenswrapper[4865]: I0126 17:14:43.664079 4865 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"galera-openstack-cell1-dockercfg-4z728" Jan 26 17:14:43 crc kubenswrapper[4865]: I0126 17:14:43.665032 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"nova-kuttl-default"/"openstack-cell1-scripts" Jan 26 17:14:43 crc kubenswrapper[4865]: I0126 17:14:43.672915 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/openstack-cell1-galera-0"] Jan 26 17:14:43 crc kubenswrapper[4865]: I0126 17:14:43.806904 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f970211e-cd46-409d-abc3-09d13259c370-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"f970211e-cd46-409d-abc3-09d13259c370\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 26 17:14:43 crc kubenswrapper[4865]: I0126 17:14:43.806985 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f970211e-cd46-409d-abc3-09d13259c370-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"f970211e-cd46-409d-abc3-09d13259c370\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 26 17:14:43 crc kubenswrapper[4865]: I0126 17:14:43.807107 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rx8m4\" (UniqueName: \"kubernetes.io/projected/f970211e-cd46-409d-abc3-09d13259c370-kube-api-access-rx8m4\") pod \"openstack-cell1-galera-0\" (UID: \"f970211e-cd46-409d-abc3-09d13259c370\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 26 17:14:43 crc kubenswrapper[4865]: I0126 17:14:43.807180 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f970211e-cd46-409d-abc3-09d13259c370-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"f970211e-cd46-409d-abc3-09d13259c370\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 26 17:14:43 crc kubenswrapper[4865]: I0126 17:14:43.807207 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f970211e-cd46-409d-abc3-09d13259c370-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"f970211e-cd46-409d-abc3-09d13259c370\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 26 17:14:43 crc kubenswrapper[4865]: I0126 17:14:43.807240 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f970211e-cd46-409d-abc3-09d13259c370-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"f970211e-cd46-409d-abc3-09d13259c370\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 26 17:14:43 crc kubenswrapper[4865]: I0126 17:14:43.807272 4865 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f970211e-cd46-409d-abc3-09d13259c370-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"f970211e-cd46-409d-abc3-09d13259c370\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 26 17:14:43 crc kubenswrapper[4865]: I0126 17:14:43.807365 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-5295f977-dea5-4e69-95f9-b640d6993f0d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5295f977-dea5-4e69-95f9-b640d6993f0d\") pod \"openstack-cell1-galera-0\" (UID: \"f970211e-cd46-409d-abc3-09d13259c370\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 26 17:14:43 crc kubenswrapper[4865]: I0126 17:14:43.911908 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f970211e-cd46-409d-abc3-09d13259c370-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"f970211e-cd46-409d-abc3-09d13259c370\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 26 17:14:43 crc kubenswrapper[4865]: I0126 17:14:43.911968 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-5295f977-dea5-4e69-95f9-b640d6993f0d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5295f977-dea5-4e69-95f9-b640d6993f0d\") pod \"openstack-cell1-galera-0\" (UID: \"f970211e-cd46-409d-abc3-09d13259c370\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 26 17:14:43 crc kubenswrapper[4865]: I0126 17:14:43.912014 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f970211e-cd46-409d-abc3-09d13259c370-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"f970211e-cd46-409d-abc3-09d13259c370\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 26 17:14:43 crc kubenswrapper[4865]: I0126 17:14:43.912060 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f970211e-cd46-409d-abc3-09d13259c370-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"f970211e-cd46-409d-abc3-09d13259c370\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 26 17:14:43 crc kubenswrapper[4865]: I0126 17:14:43.912116 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rx8m4\" (UniqueName: \"kubernetes.io/projected/f970211e-cd46-409d-abc3-09d13259c370-kube-api-access-rx8m4\") pod \"openstack-cell1-galera-0\" (UID: \"f970211e-cd46-409d-abc3-09d13259c370\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 26 17:14:43 crc kubenswrapper[4865]: I0126 17:14:43.912150 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f970211e-cd46-409d-abc3-09d13259c370-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"f970211e-cd46-409d-abc3-09d13259c370\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 26 17:14:43 crc kubenswrapper[4865]: I0126 17:14:43.912168 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f970211e-cd46-409d-abc3-09d13259c370-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: 
\"f970211e-cd46-409d-abc3-09d13259c370\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 26 17:14:43 crc kubenswrapper[4865]: I0126 17:14:43.912191 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f970211e-cd46-409d-abc3-09d13259c370-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"f970211e-cd46-409d-abc3-09d13259c370\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 26 17:14:43 crc kubenswrapper[4865]: I0126 17:14:43.913063 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f970211e-cd46-409d-abc3-09d13259c370-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"f970211e-cd46-409d-abc3-09d13259c370\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 26 17:14:43 crc kubenswrapper[4865]: I0126 17:14:43.913691 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f970211e-cd46-409d-abc3-09d13259c370-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"f970211e-cd46-409d-abc3-09d13259c370\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 26 17:14:43 crc kubenswrapper[4865]: I0126 17:14:43.913807 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f970211e-cd46-409d-abc3-09d13259c370-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"f970211e-cd46-409d-abc3-09d13259c370\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 26 17:14:43 crc kubenswrapper[4865]: I0126 17:14:43.914682 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f970211e-cd46-409d-abc3-09d13259c370-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"f970211e-cd46-409d-abc3-09d13259c370\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 26 17:14:43 crc kubenswrapper[4865]: I0126 17:14:43.919121 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f970211e-cd46-409d-abc3-09d13259c370-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"f970211e-cd46-409d-abc3-09d13259c370\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 26 17:14:43 crc kubenswrapper[4865]: I0126 17:14:43.919842 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f970211e-cd46-409d-abc3-09d13259c370-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"f970211e-cd46-409d-abc3-09d13259c370\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 26 17:14:43 crc kubenswrapper[4865]: I0126 17:14:43.917262 4865 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 26 17:14:43 crc kubenswrapper[4865]: I0126 17:14:43.924183 4865 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-5295f977-dea5-4e69-95f9-b640d6993f0d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5295f977-dea5-4e69-95f9-b640d6993f0d\") pod \"openstack-cell1-galera-0\" (UID: \"f970211e-cd46-409d-abc3-09d13259c370\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/6c252b5782e8423b86bae076f1b74135740c7de5c60a1854b9ca03ff6a51249c/globalmount\"" pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 26 17:14:43 crc kubenswrapper[4865]: I0126 17:14:43.936912 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rx8m4\" (UniqueName: \"kubernetes.io/projected/f970211e-cd46-409d-abc3-09d13259c370-kube-api-access-rx8m4\") pod \"openstack-cell1-galera-0\" (UID: \"f970211e-cd46-409d-abc3-09d13259c370\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 26 17:14:43 crc kubenswrapper[4865]: I0126 17:14:43.966123 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-5295f977-dea5-4e69-95f9-b640d6993f0d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5295f977-dea5-4e69-95f9-b640d6993f0d\") pod \"openstack-cell1-galera-0\" (UID: \"f970211e-cd46-409d-abc3-09d13259c370\") " pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 26 17:14:43 crc kubenswrapper[4865]: I0126 17:14:43.990643 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 26 17:14:44 crc kubenswrapper[4865]: I0126 17:14:44.482137 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/openstack-cell1-galera-0"] Jan 26 17:14:44 crc kubenswrapper[4865]: I0126 17:14:44.482727 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/memcached-0" event={"ID":"409c7131-e3b9-4bbc-a7db-51f9c150e354","Type":"ContainerStarted","Data":"a153712aa9a4485a46afd7509a90161dd7b84ac765f35dbedd09fb447daac3ff"} Jan 26 17:14:45 crc kubenswrapper[4865]: I0126 17:14:45.476469 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/openstack-cell1-galera-0" event={"ID":"f970211e-cd46-409d-abc3-09d13259c370","Type":"ContainerStarted","Data":"97bf1d240e9412570dc7f5327ab90e488b3a9f6775dbf475d5da97a0ed924e66"} Jan 26 17:14:56 crc kubenswrapper[4865]: E0126 17:14:56.049630 4865 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Jan 26 17:14:56 crc kubenswrapper[4865]: E0126 17:14:56.050652 4865 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-w9m6g,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000710000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-server-0_nova-kuttl-default(31ccc966-f964-468b-a364-0bb3360a4933): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 26 17:14:56 crc kubenswrapper[4865]: E0126 17:14:56.051914 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="nova-kuttl-default/rabbitmq-server-0" podUID="31ccc966-f964-468b-a364-0bb3360a4933" Jan 26 17:14:56 crc kubenswrapper[4865]: I0126 17:14:56.602324 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/openstack-galera-0" event={"ID":"2ae87188-ebdc-44a7-8504-5f61c2e4ea9a","Type":"ContainerStarted","Data":"55b8988baab0b4715fe79ca65d483b47423eb25699f1a24371f4d6f71b2cfb5b"} Jan 26 17:14:56 crc kubenswrapper[4865]: I0126 17:14:56.603896 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/memcached-0" event={"ID":"409c7131-e3b9-4bbc-a7db-51f9c150e354","Type":"ContainerStarted","Data":"98d3721e6b0a342c59e0ec46fb8a61d27e310757a3fefc6d274ddc49cade6ed1"} Jan 26 17:14:56 crc kubenswrapper[4865]: I0126 17:14:56.604041 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/memcached-0" Jan 26 17:14:56 crc kubenswrapper[4865]: I0126 17:14:56.605479 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/openstack-cell1-galera-0" event={"ID":"f970211e-cd46-409d-abc3-09d13259c370","Type":"ContainerStarted","Data":"f28b5adeddc2f157937c0de8265dbf936b384a4a0f395e5d99de393b229bd2cd"} Jan 26 17:14:56 crc 
Jan 26 17:15:00 crc kubenswrapper[4865]: I0126 17:15:00.142289 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29490795-9qtlm"]
Jan 26 17:15:00 crc kubenswrapper[4865]: I0126 17:15:00.143783 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29490795-9qtlm"
Jan 26 17:15:00 crc kubenswrapper[4865]: I0126 17:15:00.146850 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Jan 26 17:15:00 crc kubenswrapper[4865]: I0126 17:15:00.146858 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Jan 26 17:15:00 crc kubenswrapper[4865]: I0126 17:15:00.161037 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29490795-9qtlm"]
Jan 26 17:15:00 crc kubenswrapper[4865]: I0126 17:15:00.219028 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bbf6b25b-12a4-4d37-bf26-b5b2228628a1-secret-volume\") pod \"collect-profiles-29490795-9qtlm\" (UID: \"bbf6b25b-12a4-4d37-bf26-b5b2228628a1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490795-9qtlm"
Jan 26 17:15:00 crc kubenswrapper[4865]: I0126 17:15:00.219383 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hp459\" (UniqueName: \"kubernetes.io/projected/bbf6b25b-12a4-4d37-bf26-b5b2228628a1-kube-api-access-hp459\") pod \"collect-profiles-29490795-9qtlm\" (UID: \"bbf6b25b-12a4-4d37-bf26-b5b2228628a1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490795-9qtlm"
Jan 26 17:15:00 crc kubenswrapper[4865]: I0126 17:15:00.219470 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bbf6b25b-12a4-4d37-bf26-b5b2228628a1-config-volume\") pod \"collect-profiles-29490795-9qtlm\" (UID: \"bbf6b25b-12a4-4d37-bf26-b5b2228628a1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490795-9qtlm"
Jan 26 17:15:00 crc kubenswrapper[4865]: I0126 17:15:00.321899 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hp459\" (UniqueName: \"kubernetes.io/projected/bbf6b25b-12a4-4d37-bf26-b5b2228628a1-kube-api-access-hp459\") pod \"collect-profiles-29490795-9qtlm\" (UID: \"bbf6b25b-12a4-4d37-bf26-b5b2228628a1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490795-9qtlm"
Jan 26 17:15:00 crc kubenswrapper[4865]: I0126 17:15:00.322464 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName:
\"kubernetes.io/configmap/bbf6b25b-12a4-4d37-bf26-b5b2228628a1-config-volume\") pod \"collect-profiles-29490795-9qtlm\" (UID: \"bbf6b25b-12a4-4d37-bf26-b5b2228628a1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490795-9qtlm" Jan 26 17:15:00 crc kubenswrapper[4865]: I0126 17:15:00.322611 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bbf6b25b-12a4-4d37-bf26-b5b2228628a1-secret-volume\") pod \"collect-profiles-29490795-9qtlm\" (UID: \"bbf6b25b-12a4-4d37-bf26-b5b2228628a1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490795-9qtlm" Jan 26 17:15:00 crc kubenswrapper[4865]: I0126 17:15:00.324526 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bbf6b25b-12a4-4d37-bf26-b5b2228628a1-config-volume\") pod \"collect-profiles-29490795-9qtlm\" (UID: \"bbf6b25b-12a4-4d37-bf26-b5b2228628a1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490795-9qtlm" Jan 26 17:15:00 crc kubenswrapper[4865]: I0126 17:15:00.329750 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bbf6b25b-12a4-4d37-bf26-b5b2228628a1-secret-volume\") pod \"collect-profiles-29490795-9qtlm\" (UID: \"bbf6b25b-12a4-4d37-bf26-b5b2228628a1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490795-9qtlm" Jan 26 17:15:00 crc kubenswrapper[4865]: I0126 17:15:00.345507 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hp459\" (UniqueName: \"kubernetes.io/projected/bbf6b25b-12a4-4d37-bf26-b5b2228628a1-kube-api-access-hp459\") pod \"collect-profiles-29490795-9qtlm\" (UID: \"bbf6b25b-12a4-4d37-bf26-b5b2228628a1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29490795-9qtlm" Jan 26 17:15:00 crc kubenswrapper[4865]: I0126 17:15:00.470851 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29490795-9qtlm" Jan 26 17:15:00 crc kubenswrapper[4865]: I0126 17:15:00.926023 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29490795-9qtlm"] Jan 26 17:15:00 crc kubenswrapper[4865]: W0126 17:15:00.934633 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbbf6b25b_12a4_4d37_bf26_b5b2228628a1.slice/crio-127b29309127263f7954412c72320b0cd67321681beb1bc7c0842978e18a53a5 WatchSource:0}: Error finding container 127b29309127263f7954412c72320b0cd67321681beb1bc7c0842978e18a53a5: Status 404 returned error can't find the container with id 127b29309127263f7954412c72320b0cd67321681beb1bc7c0842978e18a53a5 Jan 26 17:15:01 crc kubenswrapper[4865]: I0126 17:15:01.662974 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29490795-9qtlm" event={"ID":"bbf6b25b-12a4-4d37-bf26-b5b2228628a1","Type":"ContainerStarted","Data":"127b29309127263f7954412c72320b0cd67321681beb1bc7c0842978e18a53a5"} Jan 26 17:15:02 crc kubenswrapper[4865]: I0126 17:15:02.671938 4865 generic.go:334] "Generic (PLEG): container finished" podID="bbf6b25b-12a4-4d37-bf26-b5b2228628a1" containerID="39b8827076fe0329ce428e9d9b28f2b8ef7d8527b6bf0dd35acfea7fe2af39b4" exitCode=0 Jan 26 17:15:02 crc kubenswrapper[4865]: I0126 17:15:02.672023 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29490795-9qtlm" event={"ID":"bbf6b25b-12a4-4d37-bf26-b5b2228628a1","Type":"ContainerDied","Data":"39b8827076fe0329ce428e9d9b28f2b8ef7d8527b6bf0dd35acfea7fe2af39b4"} Jan 26 17:15:02 crc kubenswrapper[4865]: I0126 17:15:02.676365 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/rabbitmq-cell1-server-0" event={"ID":"0b48bb4a-936f-491b-b54a-a66bfcac547d","Type":"ContainerStarted","Data":"6d1ab2282f94c1a365a56d7dd910df5cad0592e2dc54fbe024966bae74b00d72"} Jan 26 17:15:02 crc kubenswrapper[4865]: I0126 17:15:02.678019 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/rabbitmq-server-0" event={"ID":"31ccc966-f964-468b-a364-0bb3360a4933","Type":"ContainerStarted","Data":"3fdb7968ba934b26ca829a0aa52abfa96692801423b6f7be6168da03cf063878"} Jan 26 17:15:02 crc kubenswrapper[4865]: I0126 17:15:02.679689 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" event={"ID":"13ed1e3f-65dc-47e3-a8bc-c33f37660f23","Type":"ContainerStarted","Data":"b22ee16e9db269e990fc1d7298511afac3caea971dc21faadd5373de6c4fb98b"} Jan 26 17:15:02 crc kubenswrapper[4865]: I0126 17:15:02.907955 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/memcached-0" Jan 26 17:15:03 crc kubenswrapper[4865]: I0126 17:15:03.981109 4865 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29490795-9qtlm" Jan 26 17:15:04 crc kubenswrapper[4865]: I0126 17:15:04.091619 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bbf6b25b-12a4-4d37-bf26-b5b2228628a1-config-volume\") pod \"bbf6b25b-12a4-4d37-bf26-b5b2228628a1\" (UID: \"bbf6b25b-12a4-4d37-bf26-b5b2228628a1\") " Jan 26 17:15:04 crc kubenswrapper[4865]: I0126 17:15:04.091818 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hp459\" (UniqueName: \"kubernetes.io/projected/bbf6b25b-12a4-4d37-bf26-b5b2228628a1-kube-api-access-hp459\") pod \"bbf6b25b-12a4-4d37-bf26-b5b2228628a1\" (UID: \"bbf6b25b-12a4-4d37-bf26-b5b2228628a1\") " Jan 26 17:15:04 crc kubenswrapper[4865]: I0126 17:15:04.091934 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bbf6b25b-12a4-4d37-bf26-b5b2228628a1-secret-volume\") pod \"bbf6b25b-12a4-4d37-bf26-b5b2228628a1\" (UID: \"bbf6b25b-12a4-4d37-bf26-b5b2228628a1\") " Jan 26 17:15:04 crc kubenswrapper[4865]: I0126 17:15:04.093511 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bbf6b25b-12a4-4d37-bf26-b5b2228628a1-config-volume" (OuterVolumeSpecName: "config-volume") pod "bbf6b25b-12a4-4d37-bf26-b5b2228628a1" (UID: "bbf6b25b-12a4-4d37-bf26-b5b2228628a1"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 17:15:04 crc kubenswrapper[4865]: I0126 17:15:04.098207 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bbf6b25b-12a4-4d37-bf26-b5b2228628a1-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "bbf6b25b-12a4-4d37-bf26-b5b2228628a1" (UID: "bbf6b25b-12a4-4d37-bf26-b5b2228628a1"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 17:15:04 crc kubenswrapper[4865]: I0126 17:15:04.098239 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bbf6b25b-12a4-4d37-bf26-b5b2228628a1-kube-api-access-hp459" (OuterVolumeSpecName: "kube-api-access-hp459") pod "bbf6b25b-12a4-4d37-bf26-b5b2228628a1" (UID: "bbf6b25b-12a4-4d37-bf26-b5b2228628a1"). InnerVolumeSpecName "kube-api-access-hp459". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 17:15:04 crc kubenswrapper[4865]: I0126 17:15:04.194519 4865 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bbf6b25b-12a4-4d37-bf26-b5b2228628a1-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 26 17:15:04 crc kubenswrapper[4865]: I0126 17:15:04.194570 4865 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bbf6b25b-12a4-4d37-bf26-b5b2228628a1-config-volume\") on node \"crc\" DevicePath \"\"" Jan 26 17:15:04 crc kubenswrapper[4865]: I0126 17:15:04.194587 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hp459\" (UniqueName: \"kubernetes.io/projected/bbf6b25b-12a4-4d37-bf26-b5b2228628a1-kube-api-access-hp459\") on node \"crc\" DevicePath \"\"" Jan 26 17:15:04 crc kubenswrapper[4865]: I0126 17:15:04.511661 4865 patch_prober.go:28] interesting pod/machine-config-daemon-q8cb9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 17:15:04 crc kubenswrapper[4865]: I0126 17:15:04.512131 4865 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 17:15:04 crc kubenswrapper[4865]: I0126 17:15:04.512186 4865 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" Jan 26 17:15:04 crc kubenswrapper[4865]: I0126 17:15:04.512901 4865 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"02835d7cd4d6dbfb1002abe48dff7a6d86863fd969778fa1b275c5170d84e16e"} pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 26 17:15:04 crc kubenswrapper[4865]: I0126 17:15:04.512962 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" containerName="machine-config-daemon" containerID="cri-o://02835d7cd4d6dbfb1002abe48dff7a6d86863fd969778fa1b275c5170d84e16e" gracePeriod=600 Jan 26 17:15:04 crc kubenswrapper[4865]: I0126 17:15:04.697693 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29490795-9qtlm" event={"ID":"bbf6b25b-12a4-4d37-bf26-b5b2228628a1","Type":"ContainerDied","Data":"127b29309127263f7954412c72320b0cd67321681beb1bc7c0842978e18a53a5"} Jan 26 17:15:04 crc kubenswrapper[4865]: I0126 17:15:04.697746 4865 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="127b29309127263f7954412c72320b0cd67321681beb1bc7c0842978e18a53a5" Jan 26 17:15:04 crc kubenswrapper[4865]: I0126 17:15:04.697744 4865 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29490795-9qtlm" Jan 26 17:15:05 crc kubenswrapper[4865]: I0126 17:15:05.707906 4865 generic.go:334] "Generic (PLEG): container finished" podID="0ddedab5-2528-4881-9251-9ba5334aea61" containerID="02835d7cd4d6dbfb1002abe48dff7a6d86863fd969778fa1b275c5170d84e16e" exitCode=0 Jan 26 17:15:05 crc kubenswrapper[4865]: I0126 17:15:05.707973 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" event={"ID":"0ddedab5-2528-4881-9251-9ba5334aea61","Type":"ContainerDied","Data":"02835d7cd4d6dbfb1002abe48dff7a6d86863fd969778fa1b275c5170d84e16e"} Jan 26 17:15:05 crc kubenswrapper[4865]: I0126 17:15:05.708380 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" event={"ID":"0ddedab5-2528-4881-9251-9ba5334aea61","Type":"ContainerStarted","Data":"4a8168cbc289a20fb396355edbd9ff821c457b13b920d48ef22e5c0bf955b681"} Jan 26 17:15:05 crc kubenswrapper[4865]: I0126 17:15:05.708406 4865 scope.go:117] "RemoveContainer" containerID="75da2701546c4de83d6fc2fed2ae87c37fb18a0f0c6813145dc77a86aa6e728c" Jan 26 17:15:08 crc kubenswrapper[4865]: I0126 17:15:08.740244 4865 generic.go:334] "Generic (PLEG): container finished" podID="2ae87188-ebdc-44a7-8504-5f61c2e4ea9a" containerID="55b8988baab0b4715fe79ca65d483b47423eb25699f1a24371f4d6f71b2cfb5b" exitCode=0 Jan 26 17:15:08 crc kubenswrapper[4865]: I0126 17:15:08.740375 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/openstack-galera-0" event={"ID":"2ae87188-ebdc-44a7-8504-5f61c2e4ea9a","Type":"ContainerDied","Data":"55b8988baab0b4715fe79ca65d483b47423eb25699f1a24371f4d6f71b2cfb5b"} Jan 26 17:15:08 crc kubenswrapper[4865]: I0126 17:15:08.744490 4865 generic.go:334] "Generic (PLEG): container finished" podID="f970211e-cd46-409d-abc3-09d13259c370" containerID="f28b5adeddc2f157937c0de8265dbf936b384a4a0f395e5d99de393b229bd2cd" exitCode=0 Jan 26 17:15:08 crc kubenswrapper[4865]: I0126 17:15:08.744554 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/openstack-cell1-galera-0" event={"ID":"f970211e-cd46-409d-abc3-09d13259c370","Type":"ContainerDied","Data":"f28b5adeddc2f157937c0de8265dbf936b384a4a0f395e5d99de393b229bd2cd"} Jan 26 17:15:09 crc kubenswrapper[4865]: I0126 17:15:09.756653 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/openstack-cell1-galera-0" event={"ID":"f970211e-cd46-409d-abc3-09d13259c370","Type":"ContainerStarted","Data":"ae1efbfcd84bf4853448dd5a06fa516440aebb045ce28e869c21652196131d71"} Jan 26 17:15:09 crc kubenswrapper[4865]: I0126 17:15:09.761552 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/openstack-galera-0" event={"ID":"2ae87188-ebdc-44a7-8504-5f61c2e4ea9a","Type":"ContainerStarted","Data":"f2ea3adb4cb1385fc7100f5bb31cb3cf2b1cccd0453e12dea60aa5e2bb6ff7cf"} Jan 26 17:15:09 crc kubenswrapper[4865]: I0126 17:15:09.785434 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/openstack-cell1-galera-0" podStartSLOduration=16.094489132 podStartE2EDuration="27.78541504s" podCreationTimestamp="2026-01-26 17:14:42 +0000 UTC" firstStartedPulling="2026-01-26 17:14:44.463048495 +0000 UTC m=+1212.046934082" lastFinishedPulling="2026-01-26 17:14:56.153974403 +0000 UTC m=+1223.737859990" observedRunningTime="2026-01-26 17:15:09.778633847 +0000 UTC m=+1237.362519444" 
watchObservedRunningTime="2026-01-26 17:15:09.78541504 +0000 UTC m=+1237.369300627" Jan 26 17:15:12 crc kubenswrapper[4865]: I0126 17:15:12.599412 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/openstack-galera-0" Jan 26 17:15:12 crc kubenswrapper[4865]: I0126 17:15:12.600047 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/openstack-galera-0" Jan 26 17:15:13 crc kubenswrapper[4865]: I0126 17:15:13.992274 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 26 17:15:13 crc kubenswrapper[4865]: I0126 17:15:13.992325 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 26 17:15:14 crc kubenswrapper[4865]: I0126 17:15:14.076637 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 26 17:15:14 crc kubenswrapper[4865]: I0126 17:15:14.101407 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/openstack-galera-0" podStartSLOduration=20.074532369 podStartE2EDuration="33.101378368s" podCreationTimestamp="2026-01-26 17:14:41 +0000 UTC" firstStartedPulling="2026-01-26 17:14:43.122696158 +0000 UTC m=+1210.706581745" lastFinishedPulling="2026-01-26 17:14:56.149542157 +0000 UTC m=+1223.733427744" observedRunningTime="2026-01-26 17:15:09.799622504 +0000 UTC m=+1237.383508091" watchObservedRunningTime="2026-01-26 17:15:14.101378368 +0000 UTC m=+1241.685263965" Jan 26 17:15:14 crc kubenswrapper[4865]: I0126 17:15:14.899794 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/openstack-cell1-galera-0" Jan 26 17:15:16 crc kubenswrapper[4865]: I0126 17:15:16.693123 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="nova-kuttl-default/openstack-galera-0" Jan 26 17:15:16 crc kubenswrapper[4865]: I0126 17:15:16.786335 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/openstack-galera-0" Jan 26 17:15:21 crc kubenswrapper[4865]: I0126 17:15:21.311651 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/root-account-create-update-ssd55"] Jan 26 17:15:21 crc kubenswrapper[4865]: E0126 17:15:21.312206 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bbf6b25b-12a4-4d37-bf26-b5b2228628a1" containerName="collect-profiles" Jan 26 17:15:21 crc kubenswrapper[4865]: I0126 17:15:21.312223 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="bbf6b25b-12a4-4d37-bf26-b5b2228628a1" containerName="collect-profiles" Jan 26 17:15:21 crc kubenswrapper[4865]: I0126 17:15:21.313219 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="bbf6b25b-12a4-4d37-bf26-b5b2228628a1" containerName="collect-profiles" Jan 26 17:15:21 crc kubenswrapper[4865]: I0126 17:15:21.314111 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/root-account-create-update-ssd55" Jan 26 17:15:21 crc kubenswrapper[4865]: I0126 17:15:21.316200 4865 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"openstack-mariadb-root-db-secret" Jan 26 17:15:21 crc kubenswrapper[4865]: I0126 17:15:21.324127 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/root-account-create-update-ssd55"] Jan 26 17:15:21 crc kubenswrapper[4865]: I0126 17:15:21.341977 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7c4mw\" (UniqueName: \"kubernetes.io/projected/d13bc4e2-3bfa-44f6-a0f4-892f41de8d11-kube-api-access-7c4mw\") pod \"root-account-create-update-ssd55\" (UID: \"d13bc4e2-3bfa-44f6-a0f4-892f41de8d11\") " pod="nova-kuttl-default/root-account-create-update-ssd55" Jan 26 17:15:21 crc kubenswrapper[4865]: I0126 17:15:21.342169 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d13bc4e2-3bfa-44f6-a0f4-892f41de8d11-operator-scripts\") pod \"root-account-create-update-ssd55\" (UID: \"d13bc4e2-3bfa-44f6-a0f4-892f41de8d11\") " pod="nova-kuttl-default/root-account-create-update-ssd55" Jan 26 17:15:21 crc kubenswrapper[4865]: I0126 17:15:21.444212 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d13bc4e2-3bfa-44f6-a0f4-892f41de8d11-operator-scripts\") pod \"root-account-create-update-ssd55\" (UID: \"d13bc4e2-3bfa-44f6-a0f4-892f41de8d11\") " pod="nova-kuttl-default/root-account-create-update-ssd55" Jan 26 17:15:21 crc kubenswrapper[4865]: I0126 17:15:21.444858 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7c4mw\" (UniqueName: \"kubernetes.io/projected/d13bc4e2-3bfa-44f6-a0f4-892f41de8d11-kube-api-access-7c4mw\") pod \"root-account-create-update-ssd55\" (UID: \"d13bc4e2-3bfa-44f6-a0f4-892f41de8d11\") " pod="nova-kuttl-default/root-account-create-update-ssd55" Jan 26 17:15:21 crc kubenswrapper[4865]: I0126 17:15:21.445149 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d13bc4e2-3bfa-44f6-a0f4-892f41de8d11-operator-scripts\") pod \"root-account-create-update-ssd55\" (UID: \"d13bc4e2-3bfa-44f6-a0f4-892f41de8d11\") " pod="nova-kuttl-default/root-account-create-update-ssd55" Jan 26 17:15:21 crc kubenswrapper[4865]: I0126 17:15:21.468935 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7c4mw\" (UniqueName: \"kubernetes.io/projected/d13bc4e2-3bfa-44f6-a0f4-892f41de8d11-kube-api-access-7c4mw\") pod \"root-account-create-update-ssd55\" (UID: \"d13bc4e2-3bfa-44f6-a0f4-892f41de8d11\") " pod="nova-kuttl-default/root-account-create-update-ssd55" Jan 26 17:15:21 crc kubenswrapper[4865]: I0126 17:15:21.639613 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/root-account-create-update-ssd55" Jan 26 17:15:22 crc kubenswrapper[4865]: I0126 17:15:22.059526 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/root-account-create-update-ssd55"] Jan 26 17:15:22 crc kubenswrapper[4865]: W0126 17:15:22.069592 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd13bc4e2_3bfa_44f6_a0f4_892f41de8d11.slice/crio-0e3802cba1c8390877dbe0867ecd38bd76ea76c1f3bacea0afd713d364ac882c WatchSource:0}: Error finding container 0e3802cba1c8390877dbe0867ecd38bd76ea76c1f3bacea0afd713d364ac882c: Status 404 returned error can't find the container with id 0e3802cba1c8390877dbe0867ecd38bd76ea76c1f3bacea0afd713d364ac882c Jan 26 17:15:22 crc kubenswrapper[4865]: I0126 17:15:22.371072 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/keystone-db-create-fzdd2"] Jan 26 17:15:22 crc kubenswrapper[4865]: I0126 17:15:22.373428 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/keystone-db-create-fzdd2" Jan 26 17:15:22 crc kubenswrapper[4865]: I0126 17:15:22.375262 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/keystone-db-create-fzdd2"] Jan 26 17:15:22 crc kubenswrapper[4865]: I0126 17:15:22.564523 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5chv\" (UniqueName: \"kubernetes.io/projected/ae7b537d-f915-4d41-8fd2-55f7dd75bd65-kube-api-access-k5chv\") pod \"keystone-db-create-fzdd2\" (UID: \"ae7b537d-f915-4d41-8fd2-55f7dd75bd65\") " pod="nova-kuttl-default/keystone-db-create-fzdd2" Jan 26 17:15:22 crc kubenswrapper[4865]: I0126 17:15:22.564881 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ae7b537d-f915-4d41-8fd2-55f7dd75bd65-operator-scripts\") pod \"keystone-db-create-fzdd2\" (UID: \"ae7b537d-f915-4d41-8fd2-55f7dd75bd65\") " pod="nova-kuttl-default/keystone-db-create-fzdd2" Jan 26 17:15:22 crc kubenswrapper[4865]: I0126 17:15:22.590217 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/keystone-3f89-account-create-update-d8gsn"] Jan 26 17:15:22 crc kubenswrapper[4865]: I0126 17:15:22.591875 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/keystone-3f89-account-create-update-d8gsn" Jan 26 17:15:22 crc kubenswrapper[4865]: I0126 17:15:22.594806 4865 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone-db-secret" Jan 26 17:15:22 crc kubenswrapper[4865]: I0126 17:15:22.600865 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/keystone-3f89-account-create-update-d8gsn"] Jan 26 17:15:22 crc kubenswrapper[4865]: I0126 17:15:22.666523 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ae7b537d-f915-4d41-8fd2-55f7dd75bd65-operator-scripts\") pod \"keystone-db-create-fzdd2\" (UID: \"ae7b537d-f915-4d41-8fd2-55f7dd75bd65\") " pod="nova-kuttl-default/keystone-db-create-fzdd2" Jan 26 17:15:22 crc kubenswrapper[4865]: I0126 17:15:22.667416 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qfz59\" (UniqueName: \"kubernetes.io/projected/78d9e25c-0c19-434f-aa78-264b8a4bb52e-kube-api-access-qfz59\") pod \"keystone-3f89-account-create-update-d8gsn\" (UID: \"78d9e25c-0c19-434f-aa78-264b8a4bb52e\") " pod="nova-kuttl-default/keystone-3f89-account-create-update-d8gsn" Jan 26 17:15:22 crc kubenswrapper[4865]: I0126 17:15:22.667451 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/78d9e25c-0c19-434f-aa78-264b8a4bb52e-operator-scripts\") pod \"keystone-3f89-account-create-update-d8gsn\" (UID: \"78d9e25c-0c19-434f-aa78-264b8a4bb52e\") " pod="nova-kuttl-default/keystone-3f89-account-create-update-d8gsn" Jan 26 17:15:22 crc kubenswrapper[4865]: I0126 17:15:22.667720 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ae7b537d-f915-4d41-8fd2-55f7dd75bd65-operator-scripts\") pod \"keystone-db-create-fzdd2\" (UID: \"ae7b537d-f915-4d41-8fd2-55f7dd75bd65\") " pod="nova-kuttl-default/keystone-db-create-fzdd2" Jan 26 17:15:22 crc kubenswrapper[4865]: I0126 17:15:22.667899 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5chv\" (UniqueName: \"kubernetes.io/projected/ae7b537d-f915-4d41-8fd2-55f7dd75bd65-kube-api-access-k5chv\") pod \"keystone-db-create-fzdd2\" (UID: \"ae7b537d-f915-4d41-8fd2-55f7dd75bd65\") " pod="nova-kuttl-default/keystone-db-create-fzdd2" Jan 26 17:15:22 crc kubenswrapper[4865]: I0126 17:15:22.690925 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5chv\" (UniqueName: \"kubernetes.io/projected/ae7b537d-f915-4d41-8fd2-55f7dd75bd65-kube-api-access-k5chv\") pod \"keystone-db-create-fzdd2\" (UID: \"ae7b537d-f915-4d41-8fd2-55f7dd75bd65\") " pod="nova-kuttl-default/keystone-db-create-fzdd2" Jan 26 17:15:22 crc kubenswrapper[4865]: I0126 17:15:22.709385 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/keystone-db-create-fzdd2" Jan 26 17:15:22 crc kubenswrapper[4865]: I0126 17:15:22.790366 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qfz59\" (UniqueName: \"kubernetes.io/projected/78d9e25c-0c19-434f-aa78-264b8a4bb52e-kube-api-access-qfz59\") pod \"keystone-3f89-account-create-update-d8gsn\" (UID: \"78d9e25c-0c19-434f-aa78-264b8a4bb52e\") " pod="nova-kuttl-default/keystone-3f89-account-create-update-d8gsn" Jan 26 17:15:22 crc kubenswrapper[4865]: I0126 17:15:22.790427 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/78d9e25c-0c19-434f-aa78-264b8a4bb52e-operator-scripts\") pod \"keystone-3f89-account-create-update-d8gsn\" (UID: \"78d9e25c-0c19-434f-aa78-264b8a4bb52e\") " pod="nova-kuttl-default/keystone-3f89-account-create-update-d8gsn" Jan 26 17:15:22 crc kubenswrapper[4865]: I0126 17:15:22.791377 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/78d9e25c-0c19-434f-aa78-264b8a4bb52e-operator-scripts\") pod \"keystone-3f89-account-create-update-d8gsn\" (UID: \"78d9e25c-0c19-434f-aa78-264b8a4bb52e\") " pod="nova-kuttl-default/keystone-3f89-account-create-update-d8gsn" Jan 26 17:15:22 crc kubenswrapper[4865]: I0126 17:15:22.802681 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/placement-db-create-kvzzf"] Jan 26 17:15:22 crc kubenswrapper[4865]: I0126 17:15:22.805175 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/placement-db-create-kvzzf" Jan 26 17:15:22 crc kubenswrapper[4865]: I0126 17:15:22.816110 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qfz59\" (UniqueName: \"kubernetes.io/projected/78d9e25c-0c19-434f-aa78-264b8a4bb52e-kube-api-access-qfz59\") pod \"keystone-3f89-account-create-update-d8gsn\" (UID: \"78d9e25c-0c19-434f-aa78-264b8a4bb52e\") " pod="nova-kuttl-default/keystone-3f89-account-create-update-d8gsn" Jan 26 17:15:22 crc kubenswrapper[4865]: I0126 17:15:22.821762 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/placement-db-create-kvzzf"] Jan 26 17:15:22 crc kubenswrapper[4865]: I0126 17:15:22.879943 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/root-account-create-update-ssd55" event={"ID":"d13bc4e2-3bfa-44f6-a0f4-892f41de8d11","Type":"ContainerStarted","Data":"6db6b555b47c5e9b75fb30cc29f9d8a987e025f4e0b2b45675a890cc3d9ff261"} Jan 26 17:15:22 crc kubenswrapper[4865]: I0126 17:15:22.880477 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/root-account-create-update-ssd55" event={"ID":"d13bc4e2-3bfa-44f6-a0f4-892f41de8d11","Type":"ContainerStarted","Data":"0e3802cba1c8390877dbe0867ecd38bd76ea76c1f3bacea0afd713d364ac882c"} Jan 26 17:15:22 crc kubenswrapper[4865]: I0126 17:15:22.892321 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/placement-4c01-account-create-update-qgj5c"] Jan 26 17:15:22 crc kubenswrapper[4865]: I0126 17:15:22.894423 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/placement-4c01-account-create-update-qgj5c" Jan 26 17:15:22 crc kubenswrapper[4865]: I0126 17:15:22.896581 4865 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"placement-db-secret" Jan 26 17:15:22 crc kubenswrapper[4865]: I0126 17:15:22.901639 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/placement-4c01-account-create-update-qgj5c"] Jan 26 17:15:22 crc kubenswrapper[4865]: I0126 17:15:22.948904 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/keystone-3f89-account-create-update-d8gsn" Jan 26 17:15:22 crc kubenswrapper[4865]: I0126 17:15:22.994701 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e320b6ce-4319-432a-96ba-9e1d54f795cc-operator-scripts\") pod \"placement-db-create-kvzzf\" (UID: \"e320b6ce-4319-432a-96ba-9e1d54f795cc\") " pod="nova-kuttl-default/placement-db-create-kvzzf" Jan 26 17:15:22 crc kubenswrapper[4865]: I0126 17:15:22.994786 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h225k\" (UniqueName: \"kubernetes.io/projected/e320b6ce-4319-432a-96ba-9e1d54f795cc-kube-api-access-h225k\") pod \"placement-db-create-kvzzf\" (UID: \"e320b6ce-4319-432a-96ba-9e1d54f795cc\") " pod="nova-kuttl-default/placement-db-create-kvzzf" Jan 26 17:15:23 crc kubenswrapper[4865]: I0126 17:15:23.097373 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e320b6ce-4319-432a-96ba-9e1d54f795cc-operator-scripts\") pod \"placement-db-create-kvzzf\" (UID: \"e320b6ce-4319-432a-96ba-9e1d54f795cc\") " pod="nova-kuttl-default/placement-db-create-kvzzf" Jan 26 17:15:23 crc kubenswrapper[4865]: I0126 17:15:23.097445 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cbnmp\" (UniqueName: \"kubernetes.io/projected/064698bb-635f-4b5c-8fa8-8f533ec421e1-kube-api-access-cbnmp\") pod \"placement-4c01-account-create-update-qgj5c\" (UID: \"064698bb-635f-4b5c-8fa8-8f533ec421e1\") " pod="nova-kuttl-default/placement-4c01-account-create-update-qgj5c" Jan 26 17:15:23 crc kubenswrapper[4865]: I0126 17:15:23.097496 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/064698bb-635f-4b5c-8fa8-8f533ec421e1-operator-scripts\") pod \"placement-4c01-account-create-update-qgj5c\" (UID: \"064698bb-635f-4b5c-8fa8-8f533ec421e1\") " pod="nova-kuttl-default/placement-4c01-account-create-update-qgj5c" Jan 26 17:15:23 crc kubenswrapper[4865]: I0126 17:15:23.097680 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h225k\" (UniqueName: \"kubernetes.io/projected/e320b6ce-4319-432a-96ba-9e1d54f795cc-kube-api-access-h225k\") pod \"placement-db-create-kvzzf\" (UID: \"e320b6ce-4319-432a-96ba-9e1d54f795cc\") " pod="nova-kuttl-default/placement-db-create-kvzzf" Jan 26 17:15:23 crc kubenswrapper[4865]: I0126 17:15:23.098371 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e320b6ce-4319-432a-96ba-9e1d54f795cc-operator-scripts\") pod \"placement-db-create-kvzzf\" (UID: \"e320b6ce-4319-432a-96ba-9e1d54f795cc\") " 
pod="nova-kuttl-default/placement-db-create-kvzzf" Jan 26 17:15:23 crc kubenswrapper[4865]: I0126 17:15:23.123711 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h225k\" (UniqueName: \"kubernetes.io/projected/e320b6ce-4319-432a-96ba-9e1d54f795cc-kube-api-access-h225k\") pod \"placement-db-create-kvzzf\" (UID: \"e320b6ce-4319-432a-96ba-9e1d54f795cc\") " pod="nova-kuttl-default/placement-db-create-kvzzf" Jan 26 17:15:23 crc kubenswrapper[4865]: I0126 17:15:23.180346 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/placement-db-create-kvzzf" Jan 26 17:15:23 crc kubenswrapper[4865]: I0126 17:15:23.194807 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/keystone-db-create-fzdd2"] Jan 26 17:15:23 crc kubenswrapper[4865]: I0126 17:15:23.198976 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cbnmp\" (UniqueName: \"kubernetes.io/projected/064698bb-635f-4b5c-8fa8-8f533ec421e1-kube-api-access-cbnmp\") pod \"placement-4c01-account-create-update-qgj5c\" (UID: \"064698bb-635f-4b5c-8fa8-8f533ec421e1\") " pod="nova-kuttl-default/placement-4c01-account-create-update-qgj5c" Jan 26 17:15:23 crc kubenswrapper[4865]: I0126 17:15:23.199063 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/064698bb-635f-4b5c-8fa8-8f533ec421e1-operator-scripts\") pod \"placement-4c01-account-create-update-qgj5c\" (UID: \"064698bb-635f-4b5c-8fa8-8f533ec421e1\") " pod="nova-kuttl-default/placement-4c01-account-create-update-qgj5c" Jan 26 17:15:23 crc kubenswrapper[4865]: I0126 17:15:23.200302 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/064698bb-635f-4b5c-8fa8-8f533ec421e1-operator-scripts\") pod \"placement-4c01-account-create-update-qgj5c\" (UID: \"064698bb-635f-4b5c-8fa8-8f533ec421e1\") " pod="nova-kuttl-default/placement-4c01-account-create-update-qgj5c" Jan 26 17:15:23 crc kubenswrapper[4865]: W0126 17:15:23.211556 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podae7b537d_f915_4d41_8fd2_55f7dd75bd65.slice/crio-a05f49d4c8cc24759529fd39227ad657abc2ba923f8eb672c84b0381abe2ccac WatchSource:0}: Error finding container a05f49d4c8cc24759529fd39227ad657abc2ba923f8eb672c84b0381abe2ccac: Status 404 returned error can't find the container with id a05f49d4c8cc24759529fd39227ad657abc2ba923f8eb672c84b0381abe2ccac Jan 26 17:15:23 crc kubenswrapper[4865]: I0126 17:15:23.223517 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cbnmp\" (UniqueName: \"kubernetes.io/projected/064698bb-635f-4b5c-8fa8-8f533ec421e1-kube-api-access-cbnmp\") pod \"placement-4c01-account-create-update-qgj5c\" (UID: \"064698bb-635f-4b5c-8fa8-8f533ec421e1\") " pod="nova-kuttl-default/placement-4c01-account-create-update-qgj5c" Jan 26 17:15:23 crc kubenswrapper[4865]: I0126 17:15:23.374838 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/keystone-3f89-account-create-update-d8gsn"] Jan 26 17:15:23 crc kubenswrapper[4865]: I0126 17:15:23.403966 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/placement-db-create-kvzzf"] Jan 26 17:15:23 crc kubenswrapper[4865]: I0126 17:15:23.514639 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/placement-4c01-account-create-update-qgj5c" Jan 26 17:15:23 crc kubenswrapper[4865]: W0126 17:15:23.557801 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod78d9e25c_0c19_434f_aa78_264b8a4bb52e.slice/crio-75a8cdd595b99a0c9d28ec1423307bfca7f6aba2679974c42dd24ee2eea51742 WatchSource:0}: Error finding container 75a8cdd595b99a0c9d28ec1423307bfca7f6aba2679974c42dd24ee2eea51742: Status 404 returned error can't find the container with id 75a8cdd595b99a0c9d28ec1423307bfca7f6aba2679974c42dd24ee2eea51742 Jan 26 17:15:23 crc kubenswrapper[4865]: W0126 17:15:23.558364 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode320b6ce_4319_432a_96ba_9e1d54f795cc.slice/crio-2085e1856fe3ea27fc3bafb1b79687b7cf1f8e5884377246516b8cf93f8e9129 WatchSource:0}: Error finding container 2085e1856fe3ea27fc3bafb1b79687b7cf1f8e5884377246516b8cf93f8e9129: Status 404 returned error can't find the container with id 2085e1856fe3ea27fc3bafb1b79687b7cf1f8e5884377246516b8cf93f8e9129 Jan 26 17:15:23 crc kubenswrapper[4865]: I0126 17:15:23.888471 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/placement-db-create-kvzzf" event={"ID":"e320b6ce-4319-432a-96ba-9e1d54f795cc","Type":"ContainerStarted","Data":"2085e1856fe3ea27fc3bafb1b79687b7cf1f8e5884377246516b8cf93f8e9129"} Jan 26 17:15:23 crc kubenswrapper[4865]: I0126 17:15:23.889556 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-db-create-fzdd2" event={"ID":"ae7b537d-f915-4d41-8fd2-55f7dd75bd65","Type":"ContainerStarted","Data":"a05f49d4c8cc24759529fd39227ad657abc2ba923f8eb672c84b0381abe2ccac"} Jan 26 17:15:23 crc kubenswrapper[4865]: I0126 17:15:23.891011 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-3f89-account-create-update-d8gsn" event={"ID":"78d9e25c-0c19-434f-aa78-264b8a4bb52e","Type":"ContainerStarted","Data":"75a8cdd595b99a0c9d28ec1423307bfca7f6aba2679974c42dd24ee2eea51742"} Jan 26 17:15:23 crc kubenswrapper[4865]: I0126 17:15:23.908530 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/root-account-create-update-ssd55" podStartSLOduration=2.9085060069999997 podStartE2EDuration="2.908506007s" podCreationTimestamp="2026-01-26 17:15:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 17:15:23.906809969 +0000 UTC m=+1251.490695556" watchObservedRunningTime="2026-01-26 17:15:23.908506007 +0000 UTC m=+1251.492391594" Jan 26 17:15:24 crc kubenswrapper[4865]: I0126 17:15:24.052151 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/placement-4c01-account-create-update-qgj5c"] Jan 26 17:15:24 crc kubenswrapper[4865]: I0126 17:15:24.899060 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/placement-4c01-account-create-update-qgj5c" event={"ID":"064698bb-635f-4b5c-8fa8-8f533ec421e1","Type":"ContainerStarted","Data":"4349c6b16a21cc941d2d5e4dc9e43cd86926ff2ec6a859ae24a80c29327e3c03"} Jan 26 17:15:24 crc kubenswrapper[4865]: I0126 17:15:24.899456 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/placement-4c01-account-create-update-qgj5c" 
event={"ID":"064698bb-635f-4b5c-8fa8-8f533ec421e1","Type":"ContainerStarted","Data":"aa47f1ccf7ece530c5018ef77f5bb78bc51b27bd8980680574327a79d5c7aa26"} Jan 26 17:15:24 crc kubenswrapper[4865]: I0126 17:15:24.900542 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-3f89-account-create-update-d8gsn" event={"ID":"78d9e25c-0c19-434f-aa78-264b8a4bb52e","Type":"ContainerStarted","Data":"70d9caa7b8c3fda40ff79b0a19ad382f3ae7aa6f281c0130a51a4681a1a102c7"} Jan 26 17:15:24 crc kubenswrapper[4865]: I0126 17:15:24.903078 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/placement-db-create-kvzzf" event={"ID":"e320b6ce-4319-432a-96ba-9e1d54f795cc","Type":"ContainerStarted","Data":"298fb6fc3b01065bcc70a7125bedc91845091191aa24851e4d663c83ad588381"} Jan 26 17:15:24 crc kubenswrapper[4865]: I0126 17:15:24.904345 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-db-create-fzdd2" event={"ID":"ae7b537d-f915-4d41-8fd2-55f7dd75bd65","Type":"ContainerStarted","Data":"d2f8921b572e864cb5f960a3ebf95caeb463c3155e687d4a7a032004b7c71680"} Jan 26 17:15:24 crc kubenswrapper[4865]: I0126 17:15:24.919542 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/placement-4c01-account-create-update-qgj5c" podStartSLOduration=2.919522465 podStartE2EDuration="2.919522465s" podCreationTimestamp="2026-01-26 17:15:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 17:15:24.912749303 +0000 UTC m=+1252.496634900" watchObservedRunningTime="2026-01-26 17:15:24.919522465 +0000 UTC m=+1252.503408052" Jan 26 17:15:24 crc kubenswrapper[4865]: I0126 17:15:24.931104 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/keystone-db-create-fzdd2" podStartSLOduration=2.9310859540000003 podStartE2EDuration="2.931085954s" podCreationTimestamp="2026-01-26 17:15:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 17:15:24.930826377 +0000 UTC m=+1252.514711974" watchObservedRunningTime="2026-01-26 17:15:24.931085954 +0000 UTC m=+1252.514971561" Jan 26 17:15:24 crc kubenswrapper[4865]: I0126 17:15:24.952751 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/placement-db-create-kvzzf" podStartSLOduration=2.952727239 podStartE2EDuration="2.952727239s" podCreationTimestamp="2026-01-26 17:15:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 17:15:24.944419693 +0000 UTC m=+1252.528305290" watchObservedRunningTime="2026-01-26 17:15:24.952727239 +0000 UTC m=+1252.536612826" Jan 26 17:15:24 crc kubenswrapper[4865]: I0126 17:15:24.965228 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/keystone-3f89-account-create-update-d8gsn" podStartSLOduration=2.965206744 podStartE2EDuration="2.965206744s" podCreationTimestamp="2026-01-26 17:15:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 17:15:24.960087888 +0000 UTC m=+1252.543973465" watchObservedRunningTime="2026-01-26 17:15:24.965206744 +0000 UTC m=+1252.549092331" Jan 26 17:15:25 crc kubenswrapper[4865]: I0126 17:15:25.914946 4865 
generic.go:334] "Generic (PLEG): container finished" podID="e320b6ce-4319-432a-96ba-9e1d54f795cc" containerID="298fb6fc3b01065bcc70a7125bedc91845091191aa24851e4d663c83ad588381" exitCode=0 Jan 26 17:15:25 crc kubenswrapper[4865]: I0126 17:15:25.915434 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/placement-db-create-kvzzf" event={"ID":"e320b6ce-4319-432a-96ba-9e1d54f795cc","Type":"ContainerDied","Data":"298fb6fc3b01065bcc70a7125bedc91845091191aa24851e4d663c83ad588381"} Jan 26 17:15:25 crc kubenswrapper[4865]: I0126 17:15:25.917093 4865 generic.go:334] "Generic (PLEG): container finished" podID="ae7b537d-f915-4d41-8fd2-55f7dd75bd65" containerID="d2f8921b572e864cb5f960a3ebf95caeb463c3155e687d4a7a032004b7c71680" exitCode=0 Jan 26 17:15:25 crc kubenswrapper[4865]: I0126 17:15:25.917151 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-db-create-fzdd2" event={"ID":"ae7b537d-f915-4d41-8fd2-55f7dd75bd65","Type":"ContainerDied","Data":"d2f8921b572e864cb5f960a3ebf95caeb463c3155e687d4a7a032004b7c71680"} Jan 26 17:15:25 crc kubenswrapper[4865]: I0126 17:15:25.918783 4865 generic.go:334] "Generic (PLEG): container finished" podID="d13bc4e2-3bfa-44f6-a0f4-892f41de8d11" containerID="6db6b555b47c5e9b75fb30cc29f9d8a987e025f4e0b2b45675a890cc3d9ff261" exitCode=0 Jan 26 17:15:25 crc kubenswrapper[4865]: I0126 17:15:25.918840 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/root-account-create-update-ssd55" event={"ID":"d13bc4e2-3bfa-44f6-a0f4-892f41de8d11","Type":"ContainerDied","Data":"6db6b555b47c5e9b75fb30cc29f9d8a987e025f4e0b2b45675a890cc3d9ff261"} Jan 26 17:15:25 crc kubenswrapper[4865]: I0126 17:15:25.920367 4865 generic.go:334] "Generic (PLEG): container finished" podID="064698bb-635f-4b5c-8fa8-8f533ec421e1" containerID="4349c6b16a21cc941d2d5e4dc9e43cd86926ff2ec6a859ae24a80c29327e3c03" exitCode=0 Jan 26 17:15:25 crc kubenswrapper[4865]: I0126 17:15:25.920483 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/placement-4c01-account-create-update-qgj5c" event={"ID":"064698bb-635f-4b5c-8fa8-8f533ec421e1","Type":"ContainerDied","Data":"4349c6b16a21cc941d2d5e4dc9e43cd86926ff2ec6a859ae24a80c29327e3c03"} Jan 26 17:15:25 crc kubenswrapper[4865]: I0126 17:15:25.921934 4865 generic.go:334] "Generic (PLEG): container finished" podID="78d9e25c-0c19-434f-aa78-264b8a4bb52e" containerID="70d9caa7b8c3fda40ff79b0a19ad382f3ae7aa6f281c0130a51a4681a1a102c7" exitCode=0 Jan 26 17:15:25 crc kubenswrapper[4865]: I0126 17:15:25.921965 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-3f89-account-create-update-d8gsn" event={"ID":"78d9e25c-0c19-434f-aa78-264b8a4bb52e","Type":"ContainerDied","Data":"70d9caa7b8c3fda40ff79b0a19ad382f3ae7aa6f281c0130a51a4681a1a102c7"} Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.235050 4865 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/keystone-db-create-fzdd2" Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.372089 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ae7b537d-f915-4d41-8fd2-55f7dd75bd65-operator-scripts\") pod \"ae7b537d-f915-4d41-8fd2-55f7dd75bd65\" (UID: \"ae7b537d-f915-4d41-8fd2-55f7dd75bd65\") " Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.372386 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k5chv\" (UniqueName: \"kubernetes.io/projected/ae7b537d-f915-4d41-8fd2-55f7dd75bd65-kube-api-access-k5chv\") pod \"ae7b537d-f915-4d41-8fd2-55f7dd75bd65\" (UID: \"ae7b537d-f915-4d41-8fd2-55f7dd75bd65\") " Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.374192 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ae7b537d-f915-4d41-8fd2-55f7dd75bd65-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ae7b537d-f915-4d41-8fd2-55f7dd75bd65" (UID: "ae7b537d-f915-4d41-8fd2-55f7dd75bd65"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.379583 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae7b537d-f915-4d41-8fd2-55f7dd75bd65-kube-api-access-k5chv" (OuterVolumeSpecName: "kube-api-access-k5chv") pod "ae7b537d-f915-4d41-8fd2-55f7dd75bd65" (UID: "ae7b537d-f915-4d41-8fd2-55f7dd75bd65"). InnerVolumeSpecName "kube-api-access-k5chv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.474476 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k5chv\" (UniqueName: \"kubernetes.io/projected/ae7b537d-f915-4d41-8fd2-55f7dd75bd65-kube-api-access-k5chv\") on node \"crc\" DevicePath \"\"" Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.475406 4865 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ae7b537d-f915-4d41-8fd2-55f7dd75bd65-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.502706 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/keystone-3f89-account-create-update-d8gsn" Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.512133 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/root-account-create-update-ssd55" Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.521608 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/placement-db-create-kvzzf" Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.546065 4865 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/placement-4c01-account-create-update-qgj5c"
Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.677292 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h225k\" (UniqueName: \"kubernetes.io/projected/e320b6ce-4319-432a-96ba-9e1d54f795cc-kube-api-access-h225k\") pod \"e320b6ce-4319-432a-96ba-9e1d54f795cc\" (UID: \"e320b6ce-4319-432a-96ba-9e1d54f795cc\") "
Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.677370 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/064698bb-635f-4b5c-8fa8-8f533ec421e1-operator-scripts\") pod \"064698bb-635f-4b5c-8fa8-8f533ec421e1\" (UID: \"064698bb-635f-4b5c-8fa8-8f533ec421e1\") "
Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.677402 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cbnmp\" (UniqueName: \"kubernetes.io/projected/064698bb-635f-4b5c-8fa8-8f533ec421e1-kube-api-access-cbnmp\") pod \"064698bb-635f-4b5c-8fa8-8f533ec421e1\" (UID: \"064698bb-635f-4b5c-8fa8-8f533ec421e1\") "
Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.677543 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qfz59\" (UniqueName: \"kubernetes.io/projected/78d9e25c-0c19-434f-aa78-264b8a4bb52e-kube-api-access-qfz59\") pod \"78d9e25c-0c19-434f-aa78-264b8a4bb52e\" (UID: \"78d9e25c-0c19-434f-aa78-264b8a4bb52e\") "
Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.677656 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e320b6ce-4319-432a-96ba-9e1d54f795cc-operator-scripts\") pod \"e320b6ce-4319-432a-96ba-9e1d54f795cc\" (UID: \"e320b6ce-4319-432a-96ba-9e1d54f795cc\") "
Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.677693 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4mw\" (UniqueName: \"kubernetes.io/projected/d13bc4e2-3bfa-44f6-a0f4-892f41de8d11-kube-api-access-7c4mw\") pod \"d13bc4e2-3bfa-44f6-a0f4-892f41de8d11\" (UID: \"d13bc4e2-3bfa-44f6-a0f4-892f41de8d11\") "
Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.677734 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d13bc4e2-3bfa-44f6-a0f4-892f41de8d11-operator-scripts\") pod \"d13bc4e2-3bfa-44f6-a0f4-892f41de8d11\" (UID: \"d13bc4e2-3bfa-44f6-a0f4-892f41de8d11\") "
Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.677761 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/78d9e25c-0c19-434f-aa78-264b8a4bb52e-operator-scripts\") pod \"78d9e25c-0c19-434f-aa78-264b8a4bb52e\" (UID: \"78d9e25c-0c19-434f-aa78-264b8a4bb52e\") "
Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.678118 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/064698bb-635f-4b5c-8fa8-8f533ec421e1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "064698bb-635f-4b5c-8fa8-8f533ec421e1" (UID: "064698bb-635f-4b5c-8fa8-8f533ec421e1"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.678518 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/78d9e25c-0c19-434f-aa78-264b8a4bb52e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "78d9e25c-0c19-434f-aa78-264b8a4bb52e" (UID: "78d9e25c-0c19-434f-aa78-264b8a4bb52e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.678797 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e320b6ce-4319-432a-96ba-9e1d54f795cc-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e320b6ce-4319-432a-96ba-9e1d54f795cc" (UID: "e320b6ce-4319-432a-96ba-9e1d54f795cc"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.678796 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d13bc4e2-3bfa-44f6-a0f4-892f41de8d11-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d13bc4e2-3bfa-44f6-a0f4-892f41de8d11" (UID: "d13bc4e2-3bfa-44f6-a0f4-892f41de8d11"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.680902 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e320b6ce-4319-432a-96ba-9e1d54f795cc-kube-api-access-h225k" (OuterVolumeSpecName: "kube-api-access-h225k") pod "e320b6ce-4319-432a-96ba-9e1d54f795cc" (UID: "e320b6ce-4319-432a-96ba-9e1d54f795cc"). InnerVolumeSpecName "kube-api-access-h225k". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.680977 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78d9e25c-0c19-434f-aa78-264b8a4bb52e-kube-api-access-qfz59" (OuterVolumeSpecName: "kube-api-access-qfz59") pod "78d9e25c-0c19-434f-aa78-264b8a4bb52e" (UID: "78d9e25c-0c19-434f-aa78-264b8a4bb52e"). InnerVolumeSpecName "kube-api-access-qfz59". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.681615 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d13bc4e2-3bfa-44f6-a0f4-892f41de8d11-kube-api-access-7c4mw" (OuterVolumeSpecName: "kube-api-access-7c4mw") pod "d13bc4e2-3bfa-44f6-a0f4-892f41de8d11" (UID: "d13bc4e2-3bfa-44f6-a0f4-892f41de8d11"). InnerVolumeSpecName "kube-api-access-7c4mw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.681617 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/064698bb-635f-4b5c-8fa8-8f533ec421e1-kube-api-access-cbnmp" (OuterVolumeSpecName: "kube-api-access-cbnmp") pod "064698bb-635f-4b5c-8fa8-8f533ec421e1" (UID: "064698bb-635f-4b5c-8fa8-8f533ec421e1"). InnerVolumeSpecName "kube-api-access-cbnmp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.780696 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h225k\" (UniqueName: \"kubernetes.io/projected/e320b6ce-4319-432a-96ba-9e1d54f795cc-kube-api-access-h225k\") on node \"crc\" DevicePath \"\""
Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.780767 4865 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/064698bb-635f-4b5c-8fa8-8f533ec421e1-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.780780 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cbnmp\" (UniqueName: \"kubernetes.io/projected/064698bb-635f-4b5c-8fa8-8f533ec421e1-kube-api-access-cbnmp\") on node \"crc\" DevicePath \"\""
Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.780792 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qfz59\" (UniqueName: \"kubernetes.io/projected/78d9e25c-0c19-434f-aa78-264b8a4bb52e-kube-api-access-qfz59\") on node \"crc\" DevicePath \"\""
Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.780841 4865 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e320b6ce-4319-432a-96ba-9e1d54f795cc-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.780855 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4mw\" (UniqueName: \"kubernetes.io/projected/d13bc4e2-3bfa-44f6-a0f4-892f41de8d11-kube-api-access-7c4mw\") on node \"crc\" DevicePath \"\""
Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.780867 4865 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d13bc4e2-3bfa-44f6-a0f4-892f41de8d11-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.780877 4865 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/78d9e25c-0c19-434f-aa78-264b8a4bb52e-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.936778 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/keystone-db-create-fzdd2"
Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.936776 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-db-create-fzdd2" event={"ID":"ae7b537d-f915-4d41-8fd2-55f7dd75bd65","Type":"ContainerDied","Data":"a05f49d4c8cc24759529fd39227ad657abc2ba923f8eb672c84b0381abe2ccac"}
Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.936837 4865 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a05f49d4c8cc24759529fd39227ad657abc2ba923f8eb672c84b0381abe2ccac"
Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.938222 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/root-account-create-update-ssd55" event={"ID":"d13bc4e2-3bfa-44f6-a0f4-892f41de8d11","Type":"ContainerDied","Data":"0e3802cba1c8390877dbe0867ecd38bd76ea76c1f3bacea0afd713d364ac882c"}
Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.938252 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/root-account-create-update-ssd55"
Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.938269 4865 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0e3802cba1c8390877dbe0867ecd38bd76ea76c1f3bacea0afd713d364ac882c"
Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.939743 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/placement-4c01-account-create-update-qgj5c"
Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.939748 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/placement-4c01-account-create-update-qgj5c" event={"ID":"064698bb-635f-4b5c-8fa8-8f533ec421e1","Type":"ContainerDied","Data":"aa47f1ccf7ece530c5018ef77f5bb78bc51b27bd8980680574327a79d5c7aa26"}
Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.940159 4865 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aa47f1ccf7ece530c5018ef77f5bb78bc51b27bd8980680574327a79d5c7aa26"
Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.941313 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/keystone-3f89-account-create-update-d8gsn"
Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.941299 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-3f89-account-create-update-d8gsn" event={"ID":"78d9e25c-0c19-434f-aa78-264b8a4bb52e","Type":"ContainerDied","Data":"75a8cdd595b99a0c9d28ec1423307bfca7f6aba2679974c42dd24ee2eea51742"}
Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.941467 4865 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="75a8cdd595b99a0c9d28ec1423307bfca7f6aba2679974c42dd24ee2eea51742"
Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.943006 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/placement-db-create-kvzzf" event={"ID":"e320b6ce-4319-432a-96ba-9e1d54f795cc","Type":"ContainerDied","Data":"2085e1856fe3ea27fc3bafb1b79687b7cf1f8e5884377246516b8cf93f8e9129"}
Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.943119 4865 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2085e1856fe3ea27fc3bafb1b79687b7cf1f8e5884377246516b8cf93f8e9129"
Jan 26 17:15:27 crc kubenswrapper[4865]: I0126 17:15:27.943069 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/placement-db-create-kvzzf"
Jan 26 17:15:32 crc kubenswrapper[4865]: I0126 17:15:32.820657 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/root-account-create-update-ssd55"]
Jan 26 17:15:32 crc kubenswrapper[4865]: I0126 17:15:32.827348 4865 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/root-account-create-update-ssd55"]
Jan 26 17:15:34 crc kubenswrapper[4865]: I0126 17:15:34.369282 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d13bc4e2-3bfa-44f6-a0f4-892f41de8d11" path="/var/lib/kubelet/pods/d13bc4e2-3bfa-44f6-a0f4-892f41de8d11/volumes"
Jan 26 17:15:35 crc kubenswrapper[4865]: I0126 17:15:35.024959 4865 generic.go:334] "Generic (PLEG): container finished" podID="0b48bb4a-936f-491b-b54a-a66bfcac547d" containerID="6d1ab2282f94c1a365a56d7dd910df5cad0592e2dc54fbe024966bae74b00d72" exitCode=0
Jan 26 17:15:35 crc kubenswrapper[4865]: I0126 17:15:35.025065 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/rabbitmq-cell1-server-0" event={"ID":"0b48bb4a-936f-491b-b54a-a66bfcac547d","Type":"ContainerDied","Data":"6d1ab2282f94c1a365a56d7dd910df5cad0592e2dc54fbe024966bae74b00d72"}
Jan 26 17:15:35 crc kubenswrapper[4865]: I0126 17:15:35.027378 4865 generic.go:334] "Generic (PLEG): container finished" podID="31ccc966-f964-468b-a364-0bb3360a4933" containerID="3fdb7968ba934b26ca829a0aa52abfa96692801423b6f7be6168da03cf063878" exitCode=0
Jan 26 17:15:35 crc kubenswrapper[4865]: I0126 17:15:35.027457 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/rabbitmq-server-0" event={"ID":"31ccc966-f964-468b-a364-0bb3360a4933","Type":"ContainerDied","Data":"3fdb7968ba934b26ca829a0aa52abfa96692801423b6f7be6168da03cf063878"}
Jan 26 17:15:35 crc kubenswrapper[4865]: I0126 17:15:35.030496 4865 generic.go:334] "Generic (PLEG): container finished" podID="13ed1e3f-65dc-47e3-a8bc-c33f37660f23" containerID="b22ee16e9db269e990fc1d7298511afac3caea971dc21faadd5373de6c4fb98b" exitCode=0
Jan 26 17:15:35 crc kubenswrapper[4865]: I0126 17:15:35.030521 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" event={"ID":"13ed1e3f-65dc-47e3-a8bc-c33f37660f23","Type":"ContainerDied","Data":"b22ee16e9db269e990fc1d7298511afac3caea971dc21faadd5373de6c4fb98b"}
Jan 26 17:15:36 crc kubenswrapper[4865]: I0126 17:15:36.055933 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/rabbitmq-cell1-server-0" event={"ID":"0b48bb4a-936f-491b-b54a-a66bfcac547d","Type":"ContainerStarted","Data":"ab4e929537e7895eb29e8d97af360fe7568923d49338b6d551083fd754efe211"}
Jan 26 17:15:36 crc kubenswrapper[4865]: I0126 17:15:36.058891 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/rabbitmq-cell1-server-0"
Jan 26 17:15:36 crc kubenswrapper[4865]: I0126 17:15:36.063510 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/rabbitmq-server-0" event={"ID":"31ccc966-f964-468b-a364-0bb3360a4933","Type":"ContainerStarted","Data":"2eb5d8565bfa87df8aabc4d2e16f2c4485d3fc2ef24c6d48c1dfb74a53487fba"}
Jan 26 17:15:36 crc kubenswrapper[4865]: I0126 17:15:36.063794 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/rabbitmq-server-0"
Jan 26 17:15:36 crc kubenswrapper[4865]: I0126 17:15:36.065296 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" event={"ID":"13ed1e3f-65dc-47e3-a8bc-c33f37660f23","Type":"ContainerStarted","Data":"ba70040e5770691cbb4762b02cbc19c8ae167e3cded895fe4eb1158bf8a5fa91"}
Jan 26 17:15:36 crc kubenswrapper[4865]: I0126 17:15:36.065876 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/rabbitmq-broadcaster-server-0"
Jan 26 17:15:36 crc kubenswrapper[4865]: I0126 17:15:36.097660 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/rabbitmq-cell1-server-0" podStartSLOduration=42.489194429 podStartE2EDuration="56.097641372s" podCreationTimestamp="2026-01-26 17:14:40 +0000 UTC" firstStartedPulling="2026-01-26 17:14:42.50789038 +0000 UTC m=+1210.091775967" lastFinishedPulling="2026-01-26 17:14:56.116337323 +0000 UTC m=+1223.700222910" observedRunningTime="2026-01-26 17:15:36.088099491 +0000 UTC m=+1263.671985118" watchObservedRunningTime="2026-01-26 17:15:36.097641372 +0000 UTC m=+1263.681526959"
Jan 26 17:15:36 crc kubenswrapper[4865]: I0126 17:15:36.141546 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/rabbitmq-server-0" podStartSLOduration=-9223371979.71325 podStartE2EDuration="57.141527429s" podCreationTimestamp="2026-01-26 17:14:39 +0000 UTC" firstStartedPulling="2026-01-26 17:14:42.090643483 +0000 UTC m=+1209.674529070" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 17:15:36.140955253 +0000 UTC m=+1263.724840840" watchObservedRunningTime="2026-01-26 17:15:36.141527429 +0000 UTC m=+1263.725413016"
Jan 26 17:15:36 crc kubenswrapper[4865]: I0126 17:15:36.149972 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/rabbitmq-broadcaster-server-0" podStartSLOduration=42.061830884 podStartE2EDuration="56.149960599s" podCreationTimestamp="2026-01-26 17:14:40 +0000 UTC" firstStartedPulling="2026-01-26 17:14:42.021949811 +0000 UTC m=+1209.605835398" lastFinishedPulling="2026-01-26 17:14:56.110079526 +0000 UTC m=+1223.693965113" observedRunningTime="2026-01-26 17:15:36.117158097 +0000 UTC m=+1263.701043764" watchObservedRunningTime="2026-01-26 17:15:36.149960599 +0000 UTC m=+1263.733846186"
Jan 26 17:15:37 crc kubenswrapper[4865]: I0126 17:15:37.833247 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/root-account-create-update-x64cv"]
Jan 26 17:15:37 crc kubenswrapper[4865]: E0126 17:15:37.834285 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae7b537d-f915-4d41-8fd2-55f7dd75bd65" containerName="mariadb-database-create"
Jan 26 17:15:37 crc kubenswrapper[4865]: I0126 17:15:37.834312 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae7b537d-f915-4d41-8fd2-55f7dd75bd65" containerName="mariadb-database-create"
Jan 26 17:15:37 crc kubenswrapper[4865]: E0126 17:15:37.834339 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78d9e25c-0c19-434f-aa78-264b8a4bb52e" containerName="mariadb-account-create-update"
Jan 26 17:15:37 crc kubenswrapper[4865]: I0126 17:15:37.834349 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="78d9e25c-0c19-434f-aa78-264b8a4bb52e" containerName="mariadb-account-create-update"
Jan 26 17:15:37 crc kubenswrapper[4865]: E0126 17:15:37.834380 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e320b6ce-4319-432a-96ba-9e1d54f795cc" containerName="mariadb-database-create"
Jan 26 17:15:37 crc kubenswrapper[4865]: I0126 17:15:37.834394 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="e320b6ce-4319-432a-96ba-9e1d54f795cc" containerName="mariadb-database-create"
Jan 26 17:15:37 crc kubenswrapper[4865]: E0126 17:15:37.834413 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="064698bb-635f-4b5c-8fa8-8f533ec421e1" containerName="mariadb-account-create-update"
Jan 26 17:15:37 crc kubenswrapper[4865]: I0126 17:15:37.834423 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="064698bb-635f-4b5c-8fa8-8f533ec421e1" containerName="mariadb-account-create-update"
Jan 26 17:15:37 crc kubenswrapper[4865]: E0126 17:15:37.834439 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d13bc4e2-3bfa-44f6-a0f4-892f41de8d11" containerName="mariadb-account-create-update"
Jan 26 17:15:37 crc kubenswrapper[4865]: I0126 17:15:37.834448 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="d13bc4e2-3bfa-44f6-a0f4-892f41de8d11" containerName="mariadb-account-create-update"
Jan 26 17:15:37 crc kubenswrapper[4865]: I0126 17:15:37.834686 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="e320b6ce-4319-432a-96ba-9e1d54f795cc" containerName="mariadb-database-create"
Jan 26 17:15:37 crc kubenswrapper[4865]: I0126 17:15:37.834710 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="d13bc4e2-3bfa-44f6-a0f4-892f41de8d11" containerName="mariadb-account-create-update"
Jan 26 17:15:37 crc kubenswrapper[4865]: I0126 17:15:37.834744 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae7b537d-f915-4d41-8fd2-55f7dd75bd65" containerName="mariadb-database-create"
Jan 26 17:15:37 crc kubenswrapper[4865]: I0126 17:15:37.834758 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="064698bb-635f-4b5c-8fa8-8f533ec421e1" containerName="mariadb-account-create-update"
Jan 26 17:15:37 crc kubenswrapper[4865]: I0126 17:15:37.834772 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="78d9e25c-0c19-434f-aa78-264b8a4bb52e" containerName="mariadb-account-create-update"
Jan 26 17:15:37 crc kubenswrapper[4865]: I0126 17:15:37.835593 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/root-account-create-update-x64cv"
Jan 26 17:15:37 crc kubenswrapper[4865]: I0126 17:15:37.873334 4865 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"openstack-cell1-mariadb-root-db-secret"
Jan 26 17:15:37 crc kubenswrapper[4865]: I0126 17:15:37.876931 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/root-account-create-update-x64cv"]
Jan 26 17:15:37 crc kubenswrapper[4865]: I0126 17:15:37.972355 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bmbq9\" (UniqueName: \"kubernetes.io/projected/c738f9ed-ff61-4d5f-84e8-a7fc9936423a-kube-api-access-bmbq9\") pod \"root-account-create-update-x64cv\" (UID: \"c738f9ed-ff61-4d5f-84e8-a7fc9936423a\") " pod="nova-kuttl-default/root-account-create-update-x64cv"
Jan 26 17:15:37 crc kubenswrapper[4865]: I0126 17:15:37.972463 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c738f9ed-ff61-4d5f-84e8-a7fc9936423a-operator-scripts\") pod \"root-account-create-update-x64cv\" (UID: \"c738f9ed-ff61-4d5f-84e8-a7fc9936423a\") " pod="nova-kuttl-default/root-account-create-update-x64cv"
Jan 26 17:15:38 crc kubenswrapper[4865]: I0126 17:15:38.073528 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bmbq9\" (UniqueName: \"kubernetes.io/projected/c738f9ed-ff61-4d5f-84e8-a7fc9936423a-kube-api-access-bmbq9\") pod \"root-account-create-update-x64cv\" (UID: \"c738f9ed-ff61-4d5f-84e8-a7fc9936423a\") " pod="nova-kuttl-default/root-account-create-update-x64cv"
Jan 26 17:15:38 crc kubenswrapper[4865]: I0126 17:15:38.073639 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c738f9ed-ff61-4d5f-84e8-a7fc9936423a-operator-scripts\") pod \"root-account-create-update-x64cv\" (UID: \"c738f9ed-ff61-4d5f-84e8-a7fc9936423a\") " pod="nova-kuttl-default/root-account-create-update-x64cv"
Jan 26 17:15:38 crc kubenswrapper[4865]: I0126 17:15:38.074651 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c738f9ed-ff61-4d5f-84e8-a7fc9936423a-operator-scripts\") pod \"root-account-create-update-x64cv\" (UID: \"c738f9ed-ff61-4d5f-84e8-a7fc9936423a\") " pod="nova-kuttl-default/root-account-create-update-x64cv"
Jan 26 17:15:38 crc kubenswrapper[4865]: I0126 17:15:38.110268 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bmbq9\" (UniqueName: \"kubernetes.io/projected/c738f9ed-ff61-4d5f-84e8-a7fc9936423a-kube-api-access-bmbq9\") pod \"root-account-create-update-x64cv\" (UID: \"c738f9ed-ff61-4d5f-84e8-a7fc9936423a\") " pod="nova-kuttl-default/root-account-create-update-x64cv"
Jan 26 17:15:38 crc kubenswrapper[4865]: I0126 17:15:38.187914 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/root-account-create-update-x64cv"
Jan 26 17:15:38 crc kubenswrapper[4865]: I0126 17:15:38.408871 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/root-account-create-update-x64cv"]
Jan 26 17:15:39 crc kubenswrapper[4865]: I0126 17:15:39.096537 4865 generic.go:334] "Generic (PLEG): container finished" podID="c738f9ed-ff61-4d5f-84e8-a7fc9936423a" containerID="d815c3f887b492e72a3e6ec3bad4d5bb8a5cb88c04b4c586fdf3454fa4306206" exitCode=0
Jan 26 17:15:39 crc kubenswrapper[4865]: I0126 17:15:39.096919 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/root-account-create-update-x64cv" event={"ID":"c738f9ed-ff61-4d5f-84e8-a7fc9936423a","Type":"ContainerDied","Data":"d815c3f887b492e72a3e6ec3bad4d5bb8a5cb88c04b4c586fdf3454fa4306206"}
Jan 26 17:15:39 crc kubenswrapper[4865]: I0126 17:15:39.096947 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/root-account-create-update-x64cv" event={"ID":"c738f9ed-ff61-4d5f-84e8-a7fc9936423a","Type":"ContainerStarted","Data":"ca3a7356e2316c20482655e476306f29dacdc71b184342ba0476fa2018202c44"}
Jan 26 17:15:40 crc kubenswrapper[4865]: I0126 17:15:40.471596 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/root-account-create-update-x64cv"
Jan 26 17:15:40 crc kubenswrapper[4865]: I0126 17:15:40.613738 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c738f9ed-ff61-4d5f-84e8-a7fc9936423a-operator-scripts\") pod \"c738f9ed-ff61-4d5f-84e8-a7fc9936423a\" (UID: \"c738f9ed-ff61-4d5f-84e8-a7fc9936423a\") "
Jan 26 17:15:40 crc kubenswrapper[4865]: I0126 17:15:40.613956 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bmbq9\" (UniqueName: \"kubernetes.io/projected/c738f9ed-ff61-4d5f-84e8-a7fc9936423a-kube-api-access-bmbq9\") pod \"c738f9ed-ff61-4d5f-84e8-a7fc9936423a\" (UID: \"c738f9ed-ff61-4d5f-84e8-a7fc9936423a\") "
Jan 26 17:15:40 crc kubenswrapper[4865]: I0126 17:15:40.614436 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c738f9ed-ff61-4d5f-84e8-a7fc9936423a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c738f9ed-ff61-4d5f-84e8-a7fc9936423a" (UID: "c738f9ed-ff61-4d5f-84e8-a7fc9936423a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 17:15:40 crc kubenswrapper[4865]: I0126 17:15:40.629456 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c738f9ed-ff61-4d5f-84e8-a7fc9936423a-kube-api-access-bmbq9" (OuterVolumeSpecName: "kube-api-access-bmbq9") pod "c738f9ed-ff61-4d5f-84e8-a7fc9936423a" (UID: "c738f9ed-ff61-4d5f-84e8-a7fc9936423a"). InnerVolumeSpecName "kube-api-access-bmbq9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 17:15:40 crc kubenswrapper[4865]: I0126 17:15:40.715838 4865 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c738f9ed-ff61-4d5f-84e8-a7fc9936423a-operator-scripts\") on node \"crc\" DevicePath \"\""
Jan 26 17:15:40 crc kubenswrapper[4865]: I0126 17:15:40.715880 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bmbq9\" (UniqueName: \"kubernetes.io/projected/c738f9ed-ff61-4d5f-84e8-a7fc9936423a-kube-api-access-bmbq9\") on node \"crc\" DevicePath \"\""
Jan 26 17:15:41 crc kubenswrapper[4865]: I0126 17:15:41.111161 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/root-account-create-update-x64cv" event={"ID":"c738f9ed-ff61-4d5f-84e8-a7fc9936423a","Type":"ContainerDied","Data":"ca3a7356e2316c20482655e476306f29dacdc71b184342ba0476fa2018202c44"}
Jan 26 17:15:41 crc kubenswrapper[4865]: I0126 17:15:41.111208 4865 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ca3a7356e2316c20482655e476306f29dacdc71b184342ba0476fa2018202c44"
Jan 26 17:15:41 crc kubenswrapper[4865]: I0126 17:15:41.111270 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/root-account-create-update-x64cv"
Jan 26 17:15:42 crc kubenswrapper[4865]: E0126 17:15:42.746604 4865 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod13ed1e3f_65dc_47e3_a8bc_c33f37660f23.slice/crio-conmon-b22ee16e9db269e990fc1d7298511afac3caea971dc21faadd5373de6c4fb98b.scope\": RecentStats: unable to find data in memory cache]"
Jan 26 17:15:51 crc kubenswrapper[4865]: I0126 17:15:51.665651 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/rabbitmq-broadcaster-server-0"
Jan 26 17:15:51 crc kubenswrapper[4865]: I0126 17:15:51.733229 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/rabbitmq-server-0"
Jan 26 17:15:51 crc kubenswrapper[4865]: I0126 17:15:51.932636 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/rabbitmq-cell1-server-0"
Jan 26 17:15:52 crc kubenswrapper[4865]: I0126 17:15:52.845220 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/keystone-db-sync-zz8bs"]
Jan 26 17:15:52 crc kubenswrapper[4865]: E0126 17:15:52.847570 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c738f9ed-ff61-4d5f-84e8-a7fc9936423a" containerName="mariadb-account-create-update"
Jan 26 17:15:52 crc kubenswrapper[4865]: I0126 17:15:52.847614 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="c738f9ed-ff61-4d5f-84e8-a7fc9936423a" containerName="mariadb-account-create-update"
Jan 26 17:15:52 crc kubenswrapper[4865]: I0126 17:15:52.847873 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="c738f9ed-ff61-4d5f-84e8-a7fc9936423a" containerName="mariadb-account-create-update"
Jan 26 17:15:52 crc kubenswrapper[4865]: I0126 17:15:52.848869 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/keystone-db-sync-zz8bs"
Jan 26 17:15:52 crc kubenswrapper[4865]: I0126 17:15:52.856610 4865 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone-scripts"
Jan 26 17:15:52 crc kubenswrapper[4865]: I0126 17:15:52.856655 4865 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone-config-data"
Jan 26 17:15:52 crc kubenswrapper[4865]: I0126 17:15:52.856875 4865 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone-keystone-dockercfg-skz8r"
Jan 26 17:15:52 crc kubenswrapper[4865]: I0126 17:15:52.857013 4865 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone"
Jan 26 17:15:52 crc kubenswrapper[4865]: I0126 17:15:52.861277 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/keystone-db-sync-zz8bs"]
Jan 26 17:15:52 crc kubenswrapper[4865]: E0126 17:15:52.932555 4865 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod13ed1e3f_65dc_47e3_a8bc_c33f37660f23.slice/crio-conmon-b22ee16e9db269e990fc1d7298511afac3caea971dc21faadd5373de6c4fb98b.scope\": RecentStats: unable to find data in memory cache]"
Jan 26 17:15:53 crc kubenswrapper[4865]: I0126 17:15:53.015373 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/241e06d3-d93d-4495-92db-79f641de0bdc-combined-ca-bundle\") pod \"keystone-db-sync-zz8bs\" (UID: \"241e06d3-d93d-4495-92db-79f641de0bdc\") " pod="nova-kuttl-default/keystone-db-sync-zz8bs"
Jan 26 17:15:53 crc kubenswrapper[4865]: I0126 17:15:53.015442 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-86nwh\" (UniqueName: \"kubernetes.io/projected/241e06d3-d93d-4495-92db-79f641de0bdc-kube-api-access-86nwh\") pod \"keystone-db-sync-zz8bs\" (UID: \"241e06d3-d93d-4495-92db-79f641de0bdc\") " pod="nova-kuttl-default/keystone-db-sync-zz8bs"
Jan 26 17:15:53 crc kubenswrapper[4865]: I0126 17:15:53.015474 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/241e06d3-d93d-4495-92db-79f641de0bdc-config-data\") pod \"keystone-db-sync-zz8bs\" (UID: \"241e06d3-d93d-4495-92db-79f641de0bdc\") " pod="nova-kuttl-default/keystone-db-sync-zz8bs"
Jan 26 17:15:53 crc kubenswrapper[4865]: I0126 17:15:53.116771 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/241e06d3-d93d-4495-92db-79f641de0bdc-combined-ca-bundle\") pod \"keystone-db-sync-zz8bs\" (UID: \"241e06d3-d93d-4495-92db-79f641de0bdc\") " pod="nova-kuttl-default/keystone-db-sync-zz8bs"
Jan 26 17:15:53 crc kubenswrapper[4865]: I0126 17:15:53.116838 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-86nwh\" (UniqueName: \"kubernetes.io/projected/241e06d3-d93d-4495-92db-79f641de0bdc-kube-api-access-86nwh\") pod \"keystone-db-sync-zz8bs\" (UID: \"241e06d3-d93d-4495-92db-79f641de0bdc\") " pod="nova-kuttl-default/keystone-db-sync-zz8bs"
Jan 26 17:15:53 crc kubenswrapper[4865]: I0126 17:15:53.116871 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/241e06d3-d93d-4495-92db-79f641de0bdc-config-data\") pod \"keystone-db-sync-zz8bs\" (UID: \"241e06d3-d93d-4495-92db-79f641de0bdc\") " pod="nova-kuttl-default/keystone-db-sync-zz8bs"
Jan 26 17:15:53 crc kubenswrapper[4865]: I0126 17:15:53.130207 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/241e06d3-d93d-4495-92db-79f641de0bdc-config-data\") pod \"keystone-db-sync-zz8bs\" (UID: \"241e06d3-d93d-4495-92db-79f641de0bdc\") " pod="nova-kuttl-default/keystone-db-sync-zz8bs"
Jan 26 17:15:53 crc kubenswrapper[4865]: I0126 17:15:53.131781 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/241e06d3-d93d-4495-92db-79f641de0bdc-combined-ca-bundle\") pod \"keystone-db-sync-zz8bs\" (UID: \"241e06d3-d93d-4495-92db-79f641de0bdc\") " pod="nova-kuttl-default/keystone-db-sync-zz8bs"
Jan 26 17:15:53 crc kubenswrapper[4865]: I0126 17:15:53.134857 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-86nwh\" (UniqueName: \"kubernetes.io/projected/241e06d3-d93d-4495-92db-79f641de0bdc-kube-api-access-86nwh\") pod \"keystone-db-sync-zz8bs\" (UID: \"241e06d3-d93d-4495-92db-79f641de0bdc\") " pod="nova-kuttl-default/keystone-db-sync-zz8bs"
Jan 26 17:15:53 crc kubenswrapper[4865]: I0126 17:15:53.181025 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/keystone-db-sync-zz8bs"
Jan 26 17:15:53 crc kubenswrapper[4865]: I0126 17:15:53.684079 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/keystone-db-sync-zz8bs"]
Jan 26 17:15:54 crc kubenswrapper[4865]: I0126 17:15:54.230088 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-db-sync-zz8bs" event={"ID":"241e06d3-d93d-4495-92db-79f641de0bdc","Type":"ContainerStarted","Data":"def6cc5f4042244384e685b05254d1669c7df13ba56928b59db97153a281a23f"}
Jan 26 17:16:01 crc kubenswrapper[4865]: I0126 17:16:01.301491 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-db-sync-zz8bs" event={"ID":"241e06d3-d93d-4495-92db-79f641de0bdc","Type":"ContainerStarted","Data":"9323e059beebe11a135a60a87838224659095f2f7142b2cd99d75cb027c05a6d"}
Jan 26 17:16:01 crc kubenswrapper[4865]: I0126 17:16:01.322179 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/keystone-db-sync-zz8bs" podStartSLOduration=2.070727078 podStartE2EDuration="9.322144258s" podCreationTimestamp="2026-01-26 17:15:52 +0000 UTC" firstStartedPulling="2026-01-26 17:15:53.713255391 +0000 UTC m=+1281.297140978" lastFinishedPulling="2026-01-26 17:16:00.964672571 +0000 UTC m=+1288.548558158" observedRunningTime="2026-01-26 17:16:01.318662419 +0000 UTC m=+1288.902548016" watchObservedRunningTime="2026-01-26 17:16:01.322144258 +0000 UTC m=+1288.906029845"
Jan 26 17:16:03 crc kubenswrapper[4865]: E0126 17:16:03.119861 4865 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod13ed1e3f_65dc_47e3_a8bc_c33f37660f23.slice/crio-conmon-b22ee16e9db269e990fc1d7298511afac3caea971dc21faadd5373de6c4fb98b.scope\": RecentStats: unable to find data in memory cache]"
Jan 26 17:16:04 crc kubenswrapper[4865]: I0126 17:16:04.326859 4865 generic.go:334] "Generic (PLEG): container finished" podID="241e06d3-d93d-4495-92db-79f641de0bdc" containerID="9323e059beebe11a135a60a87838224659095f2f7142b2cd99d75cb027c05a6d" exitCode=0
Jan 26 17:16:04 crc kubenswrapper[4865]: I0126 17:16:04.326924 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-db-sync-zz8bs" event={"ID":"241e06d3-d93d-4495-92db-79f641de0bdc","Type":"ContainerDied","Data":"9323e059beebe11a135a60a87838224659095f2f7142b2cd99d75cb027c05a6d"}
Jan 26 17:16:05 crc kubenswrapper[4865]: I0126 17:16:05.610752 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/keystone-db-sync-zz8bs"
Jan 26 17:16:05 crc kubenswrapper[4865]: I0126 17:16:05.688199 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-86nwh\" (UniqueName: \"kubernetes.io/projected/241e06d3-d93d-4495-92db-79f641de0bdc-kube-api-access-86nwh\") pod \"241e06d3-d93d-4495-92db-79f641de0bdc\" (UID: \"241e06d3-d93d-4495-92db-79f641de0bdc\") "
Jan 26 17:16:05 crc kubenswrapper[4865]: I0126 17:16:05.688359 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/241e06d3-d93d-4495-92db-79f641de0bdc-config-data\") pod \"241e06d3-d93d-4495-92db-79f641de0bdc\" (UID: \"241e06d3-d93d-4495-92db-79f641de0bdc\") "
Jan 26 17:16:05 crc kubenswrapper[4865]: I0126 17:16:05.689279 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/241e06d3-d93d-4495-92db-79f641de0bdc-combined-ca-bundle\") pod \"241e06d3-d93d-4495-92db-79f641de0bdc\" (UID: \"241e06d3-d93d-4495-92db-79f641de0bdc\") "
Jan 26 17:16:05 crc kubenswrapper[4865]: I0126 17:16:05.694477 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/241e06d3-d93d-4495-92db-79f641de0bdc-kube-api-access-86nwh" (OuterVolumeSpecName: "kube-api-access-86nwh") pod "241e06d3-d93d-4495-92db-79f641de0bdc" (UID: "241e06d3-d93d-4495-92db-79f641de0bdc"). InnerVolumeSpecName "kube-api-access-86nwh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 17:16:05 crc kubenswrapper[4865]: I0126 17:16:05.712063 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/241e06d3-d93d-4495-92db-79f641de0bdc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "241e06d3-d93d-4495-92db-79f641de0bdc" (UID: "241e06d3-d93d-4495-92db-79f641de0bdc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 17:16:05 crc kubenswrapper[4865]: I0126 17:16:05.727642 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/241e06d3-d93d-4495-92db-79f641de0bdc-config-data" (OuterVolumeSpecName: "config-data") pod "241e06d3-d93d-4495-92db-79f641de0bdc" (UID: "241e06d3-d93d-4495-92db-79f641de0bdc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 17:16:05 crc kubenswrapper[4865]: I0126 17:16:05.791820 4865 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/241e06d3-d93d-4495-92db-79f641de0bdc-config-data\") on node \"crc\" DevicePath \"\""
Jan 26 17:16:05 crc kubenswrapper[4865]: I0126 17:16:05.791872 4865 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/241e06d3-d93d-4495-92db-79f641de0bdc-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 26 17:16:05 crc kubenswrapper[4865]: I0126 17:16:05.791890 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-86nwh\" (UniqueName: \"kubernetes.io/projected/241e06d3-d93d-4495-92db-79f641de0bdc-kube-api-access-86nwh\") on node \"crc\" DevicePath \"\""
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.342505 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-db-sync-zz8bs" event={"ID":"241e06d3-d93d-4495-92db-79f641de0bdc","Type":"ContainerDied","Data":"def6cc5f4042244384e685b05254d1669c7df13ba56928b59db97153a281a23f"}
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.342559 4865 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="def6cc5f4042244384e685b05254d1669c7df13ba56928b59db97153a281a23f"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.342565 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/keystone-db-sync-zz8bs"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.549641 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/keystone-bootstrap-stz6v"]
Jan 26 17:16:06 crc kubenswrapper[4865]: E0126 17:16:06.552099 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="241e06d3-d93d-4495-92db-79f641de0bdc" containerName="keystone-db-sync"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.552131 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="241e06d3-d93d-4495-92db-79f641de0bdc" containerName="keystone-db-sync"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.552359 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="241e06d3-d93d-4495-92db-79f641de0bdc" containerName="keystone-db-sync"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.553073 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/keystone-bootstrap-stz6v"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.556330 4865 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone-keystone-dockercfg-skz8r"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.556497 4865 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone-config-data"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.556731 4865 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.556803 4865 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone-scripts"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.556879 4865 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"osp-secret"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.573252 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/keystone-bootstrap-stz6v"]
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.604072 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/064a3a49-59a8-4160-97cb-c2c1ed797888-credential-keys\") pod \"keystone-bootstrap-stz6v\" (UID: \"064a3a49-59a8-4160-97cb-c2c1ed797888\") " pod="nova-kuttl-default/keystone-bootstrap-stz6v"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.604257 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/064a3a49-59a8-4160-97cb-c2c1ed797888-scripts\") pod \"keystone-bootstrap-stz6v\" (UID: \"064a3a49-59a8-4160-97cb-c2c1ed797888\") " pod="nova-kuttl-default/keystone-bootstrap-stz6v"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.604428 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/064a3a49-59a8-4160-97cb-c2c1ed797888-combined-ca-bundle\") pod \"keystone-bootstrap-stz6v\" (UID: \"064a3a49-59a8-4160-97cb-c2c1ed797888\") " pod="nova-kuttl-default/keystone-bootstrap-stz6v"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.604482 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/064a3a49-59a8-4160-97cb-c2c1ed797888-config-data\") pod \"keystone-bootstrap-stz6v\" (UID: \"064a3a49-59a8-4160-97cb-c2c1ed797888\") " pod="nova-kuttl-default/keystone-bootstrap-stz6v"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.604565 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h5p56\" (UniqueName: \"kubernetes.io/projected/064a3a49-59a8-4160-97cb-c2c1ed797888-kube-api-access-h5p56\") pod \"keystone-bootstrap-stz6v\" (UID: \"064a3a49-59a8-4160-97cb-c2c1ed797888\") " pod="nova-kuttl-default/keystone-bootstrap-stz6v"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.604608 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/064a3a49-59a8-4160-97cb-c2c1ed797888-fernet-keys\") pod \"keystone-bootstrap-stz6v\" (UID: \"064a3a49-59a8-4160-97cb-c2c1ed797888\") " pod="nova-kuttl-default/keystone-bootstrap-stz6v"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.706246 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/064a3a49-59a8-4160-97cb-c2c1ed797888-combined-ca-bundle\") pod \"keystone-bootstrap-stz6v\" (UID: \"064a3a49-59a8-4160-97cb-c2c1ed797888\") " pod="nova-kuttl-default/keystone-bootstrap-stz6v"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.706296 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/064a3a49-59a8-4160-97cb-c2c1ed797888-config-data\") pod \"keystone-bootstrap-stz6v\" (UID: \"064a3a49-59a8-4160-97cb-c2c1ed797888\") " pod="nova-kuttl-default/keystone-bootstrap-stz6v"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.706513 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h5p56\" (UniqueName: \"kubernetes.io/projected/064a3a49-59a8-4160-97cb-c2c1ed797888-kube-api-access-h5p56\") pod \"keystone-bootstrap-stz6v\" (UID: \"064a3a49-59a8-4160-97cb-c2c1ed797888\") " pod="nova-kuttl-default/keystone-bootstrap-stz6v"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.706534 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/064a3a49-59a8-4160-97cb-c2c1ed797888-fernet-keys\") pod \"keystone-bootstrap-stz6v\" (UID: \"064a3a49-59a8-4160-97cb-c2c1ed797888\") " pod="nova-kuttl-default/keystone-bootstrap-stz6v"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.706616 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/064a3a49-59a8-4160-97cb-c2c1ed797888-credential-keys\") pod \"keystone-bootstrap-stz6v\" (UID: \"064a3a49-59a8-4160-97cb-c2c1ed797888\") " pod="nova-kuttl-default/keystone-bootstrap-stz6v"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.706644 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/064a3a49-59a8-4160-97cb-c2c1ed797888-scripts\") pod \"keystone-bootstrap-stz6v\" (UID: \"064a3a49-59a8-4160-97cb-c2c1ed797888\") " pod="nova-kuttl-default/keystone-bootstrap-stz6v"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.713393 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/064a3a49-59a8-4160-97cb-c2c1ed797888-fernet-keys\") pod \"keystone-bootstrap-stz6v\" (UID: \"064a3a49-59a8-4160-97cb-c2c1ed797888\") " pod="nova-kuttl-default/keystone-bootstrap-stz6v"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.714885 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/064a3a49-59a8-4160-97cb-c2c1ed797888-combined-ca-bundle\") pod \"keystone-bootstrap-stz6v\" (UID: \"064a3a49-59a8-4160-97cb-c2c1ed797888\") " pod="nova-kuttl-default/keystone-bootstrap-stz6v"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.716446 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/064a3a49-59a8-4160-97cb-c2c1ed797888-scripts\") pod \"keystone-bootstrap-stz6v\" (UID: \"064a3a49-59a8-4160-97cb-c2c1ed797888\") " pod="nova-kuttl-default/keystone-bootstrap-stz6v"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.719395 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/064a3a49-59a8-4160-97cb-c2c1ed797888-credential-keys\") pod \"keystone-bootstrap-stz6v\" (UID: \"064a3a49-59a8-4160-97cb-c2c1ed797888\") " pod="nova-kuttl-default/keystone-bootstrap-stz6v"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.731084 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/064a3a49-59a8-4160-97cb-c2c1ed797888-config-data\") pod \"keystone-bootstrap-stz6v\" (UID: \"064a3a49-59a8-4160-97cb-c2c1ed797888\") " pod="nova-kuttl-default/keystone-bootstrap-stz6v"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.733623 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/placement-db-sync-v8qx4"]
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.735247 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/placement-db-sync-v8qx4"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.739131 4865 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"placement-config-data"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.740686 4865 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"placement-placement-dockercfg-k47nm"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.741427 4865 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"placement-scripts"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.744783 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h5p56\" (UniqueName: \"kubernetes.io/projected/064a3a49-59a8-4160-97cb-c2c1ed797888-kube-api-access-h5p56\") pod \"keystone-bootstrap-stz6v\" (UID: \"064a3a49-59a8-4160-97cb-c2c1ed797888\") " pod="nova-kuttl-default/keystone-bootstrap-stz6v"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.754372 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/placement-db-sync-v8qx4"]
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.808291 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1754996-361d-43dd-bb98-c8f8cc9875e2-config-data\") pod \"placement-db-sync-v8qx4\" (UID: \"d1754996-361d-43dd-bb98-c8f8cc9875e2\") " pod="nova-kuttl-default/placement-db-sync-v8qx4"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.808575 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d1754996-361d-43dd-bb98-c8f8cc9875e2-logs\") pod \"placement-db-sync-v8qx4\" (UID: \"d1754996-361d-43dd-bb98-c8f8cc9875e2\") " pod="nova-kuttl-default/placement-db-sync-v8qx4"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.808765 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1754996-361d-43dd-bb98-c8f8cc9875e2-combined-ca-bundle\") pod \"placement-db-sync-v8qx4\" (UID: \"d1754996-361d-43dd-bb98-c8f8cc9875e2\") " pod="nova-kuttl-default/placement-db-sync-v8qx4"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.808935 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r7r5l\" (UniqueName: \"kubernetes.io/projected/d1754996-361d-43dd-bb98-c8f8cc9875e2-kube-api-access-r7r5l\") pod \"placement-db-sync-v8qx4\" (UID: \"d1754996-361d-43dd-bb98-c8f8cc9875e2\") " pod="nova-kuttl-default/placement-db-sync-v8qx4"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.809066 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d1754996-361d-43dd-bb98-c8f8cc9875e2-scripts\") pod \"placement-db-sync-v8qx4\" (UID: \"d1754996-361d-43dd-bb98-c8f8cc9875e2\") " pod="nova-kuttl-default/placement-db-sync-v8qx4"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.877110 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/keystone-bootstrap-stz6v"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.918104 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1754996-361d-43dd-bb98-c8f8cc9875e2-config-data\") pod \"placement-db-sync-v8qx4\" (UID: \"d1754996-361d-43dd-bb98-c8f8cc9875e2\") " pod="nova-kuttl-default/placement-db-sync-v8qx4"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.918179 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d1754996-361d-43dd-bb98-c8f8cc9875e2-logs\") pod \"placement-db-sync-v8qx4\" (UID: \"d1754996-361d-43dd-bb98-c8f8cc9875e2\") " pod="nova-kuttl-default/placement-db-sync-v8qx4"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.918238 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1754996-361d-43dd-bb98-c8f8cc9875e2-combined-ca-bundle\") pod \"placement-db-sync-v8qx4\" (UID: \"d1754996-361d-43dd-bb98-c8f8cc9875e2\") " pod="nova-kuttl-default/placement-db-sync-v8qx4"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.918267 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r7r5l\" (UniqueName: \"kubernetes.io/projected/d1754996-361d-43dd-bb98-c8f8cc9875e2-kube-api-access-r7r5l\") pod \"placement-db-sync-v8qx4\" (UID: \"d1754996-361d-43dd-bb98-c8f8cc9875e2\") " pod="nova-kuttl-default/placement-db-sync-v8qx4"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.918293 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d1754996-361d-43dd-bb98-c8f8cc9875e2-scripts\") pod \"placement-db-sync-v8qx4\" (UID: \"d1754996-361d-43dd-bb98-c8f8cc9875e2\") " pod="nova-kuttl-default/placement-db-sync-v8qx4"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.920295 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d1754996-361d-43dd-bb98-c8f8cc9875e2-logs\") pod \"placement-db-sync-v8qx4\" (UID: \"d1754996-361d-43dd-bb98-c8f8cc9875e2\") " pod="nova-kuttl-default/placement-db-sync-v8qx4"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.926930 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1754996-361d-43dd-bb98-c8f8cc9875e2-config-data\") pod \"placement-db-sync-v8qx4\" (UID: \"d1754996-361d-43dd-bb98-c8f8cc9875e2\") " pod="nova-kuttl-default/placement-db-sync-v8qx4"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.928907 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d1754996-361d-43dd-bb98-c8f8cc9875e2-scripts\") pod \"placement-db-sync-v8qx4\" (UID: \"d1754996-361d-43dd-bb98-c8f8cc9875e2\") " pod="nova-kuttl-default/placement-db-sync-v8qx4"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.933960 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1754996-361d-43dd-bb98-c8f8cc9875e2-combined-ca-bundle\") pod \"placement-db-sync-v8qx4\" (UID: \"d1754996-361d-43dd-bb98-c8f8cc9875e2\") " pod="nova-kuttl-default/placement-db-sync-v8qx4"
Jan 26 17:16:06 crc kubenswrapper[4865]: I0126 17:16:06.948087 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r7r5l\" (UniqueName: \"kubernetes.io/projected/d1754996-361d-43dd-bb98-c8f8cc9875e2-kube-api-access-r7r5l\") pod \"placement-db-sync-v8qx4\" (UID: \"d1754996-361d-43dd-bb98-c8f8cc9875e2\") " pod="nova-kuttl-default/placement-db-sync-v8qx4"
Jan 26 17:16:07 crc kubenswrapper[4865]: I0126 17:16:07.103107 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/placement-db-sync-v8qx4"
Jan 26 17:16:07 crc kubenswrapper[4865]: I0126 17:16:07.394805 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/placement-db-sync-v8qx4"]
Jan 26 17:16:07 crc kubenswrapper[4865]: I0126 17:16:07.428955 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/keystone-bootstrap-stz6v"]
Jan 26 17:16:07 crc kubenswrapper[4865]: W0126 17:16:07.437873 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod064a3a49_59a8_4160_97cb_c2c1ed797888.slice/crio-a1113d2f7cc1bb80981fac0266aff342ea61e69fa1d075ba5b603df7d2617287 WatchSource:0}: Error finding container a1113d2f7cc1bb80981fac0266aff342ea61e69fa1d075ba5b603df7d2617287: Status 404 returned error can't find the container with id a1113d2f7cc1bb80981fac0266aff342ea61e69fa1d075ba5b603df7d2617287
Jan 26 17:16:08 crc kubenswrapper[4865]: I0126 17:16:08.368805 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-bootstrap-stz6v" event={"ID":"064a3a49-59a8-4160-97cb-c2c1ed797888","Type":"ContainerStarted","Data":"19b5de8f35ba366e45334edd75737da23579b25e16e573c8233911da747f8c60"}
Jan 26 17:16:08 crc kubenswrapper[4865]: I0126 17:16:08.369357 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-bootstrap-stz6v" event={"ID":"064a3a49-59a8-4160-97cb-c2c1ed797888","Type":"ContainerStarted","Data":"a1113d2f7cc1bb80981fac0266aff342ea61e69fa1d075ba5b603df7d2617287"}
Jan 26 17:16:08 crc kubenswrapper[4865]: I0126 17:16:08.369965 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/placement-db-sync-v8qx4" event={"ID":"d1754996-361d-43dd-bb98-c8f8cc9875e2","Type":"ContainerStarted","Data":"1ef5139a029dd8c3a73bb4cd85ae74ffb16784a5ad6bd9f2850f36040848f426"}
Jan 26 17:16:08 crc kubenswrapper[4865]: I0126 17:16:08.387035 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/keystone-bootstrap-stz6v" podStartSLOduration=2.3870174840000002 podStartE2EDuration="2.387017484s" podCreationTimestamp="2026-01-26 17:16:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 17:16:08.382746902 +0000 UTC m=+1295.966632509" watchObservedRunningTime="2026-01-26 17:16:08.387017484 +0000 UTC m=+1295.970903071"
Jan 26 17:16:11 crc kubenswrapper[4865]: I0126 17:16:11.395287 4865 generic.go:334] "Generic (PLEG): container finished" podID="064a3a49-59a8-4160-97cb-c2c1ed797888" containerID="19b5de8f35ba366e45334edd75737da23579b25e16e573c8233911da747f8c60" exitCode=0
Jan 26 17:16:11 crc kubenswrapper[4865]: I0126 17:16:11.395463 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-bootstrap-stz6v" event={"ID":"064a3a49-59a8-4160-97cb-c2c1ed797888","Type":"ContainerDied","Data":"19b5de8f35ba366e45334edd75737da23579b25e16e573c8233911da747f8c60"}
Jan 26 17:16:11 crc kubenswrapper[4865]: I0126 17:16:11.399847 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/placement-db-sync-v8qx4" event={"ID":"d1754996-361d-43dd-bb98-c8f8cc9875e2","Type":"ContainerStarted","Data":"1ede6b41f1bd525f6609866fbaa6542cd5fdfb05e3cd6acd72c73721df3a4774"}
Jan 26 17:16:11 crc kubenswrapper[4865]: I0126 17:16:11.435828 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/placement-db-sync-v8qx4" podStartSLOduration=1.791413152 podStartE2EDuration="5.435806429s" podCreationTimestamp="2026-01-26 17:16:06 +0000 UTC" firstStartedPulling="2026-01-26 17:16:07.397539058 +0000 UTC m=+1294.981424645" lastFinishedPulling="2026-01-26 17:16:11.041932325 +0000 UTC m=+1298.625817922" observedRunningTime="2026-01-26 17:16:11.433497923 +0000 UTC m=+1299.017383520" watchObservedRunningTime="2026-01-26 17:16:11.435806429 +0000 UTC m=+1299.019692016"
Jan 26 17:16:12 crc kubenswrapper[4865]: I0126 17:16:12.798531 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/keystone-bootstrap-stz6v"
Jan 26 17:16:12 crc kubenswrapper[4865]: I0126 17:16:12.844094 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/064a3a49-59a8-4160-97cb-c2c1ed797888-combined-ca-bundle\") pod \"064a3a49-59a8-4160-97cb-c2c1ed797888\" (UID: \"064a3a49-59a8-4160-97cb-c2c1ed797888\") "
Jan 26 17:16:12 crc kubenswrapper[4865]: I0126 17:16:12.844166 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/064a3a49-59a8-4160-97cb-c2c1ed797888-fernet-keys\") pod \"064a3a49-59a8-4160-97cb-c2c1ed797888\" (UID: \"064a3a49-59a8-4160-97cb-c2c1ed797888\") "
Jan 26 17:16:12 crc kubenswrapper[4865]: I0126 17:16:12.844245 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/064a3a49-59a8-4160-97cb-c2c1ed797888-credential-keys\") pod \"064a3a49-59a8-4160-97cb-c2c1ed797888\" (UID: \"064a3a49-59a8-4160-97cb-c2c1ed797888\") "
Jan 26 17:16:12 crc kubenswrapper[4865]: I0126 17:16:12.844322 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/064a3a49-59a8-4160-97cb-c2c1ed797888-scripts\") pod \"064a3a49-59a8-4160-97cb-c2c1ed797888\" (UID: \"064a3a49-59a8-4160-97cb-c2c1ed797888\") "
Jan 26 17:16:12 crc kubenswrapper[4865]: I0126 17:16:12.844421 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h5p56\" (UniqueName: \"kubernetes.io/projected/064a3a49-59a8-4160-97cb-c2c1ed797888-kube-api-access-h5p56\") pod \"064a3a49-59a8-4160-97cb-c2c1ed797888\" (UID: \"064a3a49-59a8-4160-97cb-c2c1ed797888\") "
Jan 26 17:16:12 crc kubenswrapper[4865]: I0126 17:16:12.844484 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/064a3a49-59a8-4160-97cb-c2c1ed797888-config-data\") pod \"064a3a49-59a8-4160-97cb-c2c1ed797888\" (UID: \"064a3a49-59a8-4160-97cb-c2c1ed797888\") "
Jan 26 17:16:12 crc kubenswrapper[4865]: I0126 17:16:12.854797 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/064a3a49-59a8-4160-97cb-c2c1ed797888-kube-api-access-h5p56" (OuterVolumeSpecName: "kube-api-access-h5p56") pod "064a3a49-59a8-4160-97cb-c2c1ed797888" (UID: "064a3a49-59a8-4160-97cb-c2c1ed797888"). InnerVolumeSpecName "kube-api-access-h5p56". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 17:16:12 crc kubenswrapper[4865]: I0126 17:16:12.857237 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/064a3a49-59a8-4160-97cb-c2c1ed797888-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "064a3a49-59a8-4160-97cb-c2c1ed797888" (UID: "064a3a49-59a8-4160-97cb-c2c1ed797888"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 17:16:12 crc kubenswrapper[4865]: I0126 17:16:12.857551 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/064a3a49-59a8-4160-97cb-c2c1ed797888-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "064a3a49-59a8-4160-97cb-c2c1ed797888" (UID: "064a3a49-59a8-4160-97cb-c2c1ed797888"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 17:16:12 crc kubenswrapper[4865]: I0126 17:16:12.857533 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/064a3a49-59a8-4160-97cb-c2c1ed797888-scripts" (OuterVolumeSpecName: "scripts") pod "064a3a49-59a8-4160-97cb-c2c1ed797888" (UID: "064a3a49-59a8-4160-97cb-c2c1ed797888"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 17:16:12 crc kubenswrapper[4865]: I0126 17:16:12.874821 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/064a3a49-59a8-4160-97cb-c2c1ed797888-config-data" (OuterVolumeSpecName: "config-data") pod "064a3a49-59a8-4160-97cb-c2c1ed797888" (UID: "064a3a49-59a8-4160-97cb-c2c1ed797888"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 17:16:12 crc kubenswrapper[4865]: I0126 17:16:12.877633 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/064a3a49-59a8-4160-97cb-c2c1ed797888-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "064a3a49-59a8-4160-97cb-c2c1ed797888" (UID: "064a3a49-59a8-4160-97cb-c2c1ed797888"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 17:16:12 crc kubenswrapper[4865]: I0126 17:16:12.947172 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h5p56\" (UniqueName: \"kubernetes.io/projected/064a3a49-59a8-4160-97cb-c2c1ed797888-kube-api-access-h5p56\") on node \"crc\" DevicePath \"\""
Jan 26 17:16:12 crc kubenswrapper[4865]: I0126 17:16:12.947216 4865 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/064a3a49-59a8-4160-97cb-c2c1ed797888-config-data\") on node \"crc\" DevicePath \"\""
Jan 26 17:16:12 crc kubenswrapper[4865]: I0126 17:16:12.947233 4865 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/064a3a49-59a8-4160-97cb-c2c1ed797888-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 26 17:16:12 crc kubenswrapper[4865]: I0126 17:16:12.947244 4865 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/064a3a49-59a8-4160-97cb-c2c1ed797888-fernet-keys\") on node \"crc\" DevicePath \"\""
Jan 26 17:16:12 crc kubenswrapper[4865]: I0126 17:16:12.947255 4865 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/064a3a49-59a8-4160-97cb-c2c1ed797888-credential-keys\") on node \"crc\" DevicePath \"\""
Jan 26 17:16:12 crc kubenswrapper[4865]: I0126 17:16:12.947266 4865 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/064a3a49-59a8-4160-97cb-c2c1ed797888-scripts\") on node \"crc\" DevicePath \"\""
Jan 26 17:16:13 crc kubenswrapper[4865]: E0126 17:16:13.315690 4865 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod13ed1e3f_65dc_47e3_a8bc_c33f37660f23.slice/crio-conmon-b22ee16e9db269e990fc1d7298511afac3caea971dc21faadd5373de6c4fb98b.scope\": RecentStats: unable to find data in memory cache]"
Jan 26 17:16:13 crc kubenswrapper[4865]: I0126 17:16:13.421434 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-bootstrap-stz6v" event={"ID":"064a3a49-59a8-4160-97cb-c2c1ed797888","Type":"ContainerDied","Data":"a1113d2f7cc1bb80981fac0266aff342ea61e69fa1d075ba5b603df7d2617287"}
Jan 26 17:16:13 crc kubenswrapper[4865]: I0126 17:16:13.421509 4865 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="nova-kuttl-default/keystone-bootstrap-stz6v" Jan 26 17:16:13 crc kubenswrapper[4865]: I0126 17:16:13.421596 4865 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a1113d2f7cc1bb80981fac0266aff342ea61e69fa1d075ba5b603df7d2617287" Jan 26 17:16:13 crc kubenswrapper[4865]: I0126 17:16:13.424661 4865 generic.go:334] "Generic (PLEG): container finished" podID="d1754996-361d-43dd-bb98-c8f8cc9875e2" containerID="1ede6b41f1bd525f6609866fbaa6542cd5fdfb05e3cd6acd72c73721df3a4774" exitCode=0 Jan 26 17:16:13 crc kubenswrapper[4865]: I0126 17:16:13.424703 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/placement-db-sync-v8qx4" event={"ID":"d1754996-361d-43dd-bb98-c8f8cc9875e2","Type":"ContainerDied","Data":"1ede6b41f1bd525f6609866fbaa6542cd5fdfb05e3cd6acd72c73721df3a4774"} Jan 26 17:16:13 crc kubenswrapper[4865]: I0126 17:16:13.508464 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/keystone-bootstrap-stz6v"] Jan 26 17:16:13 crc kubenswrapper[4865]: I0126 17:16:13.516113 4865 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/keystone-bootstrap-stz6v"] Jan 26 17:16:13 crc kubenswrapper[4865]: I0126 17:16:13.602658 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/keystone-bootstrap-db774"] Jan 26 17:16:13 crc kubenswrapper[4865]: E0126 17:16:13.603413 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="064a3a49-59a8-4160-97cb-c2c1ed797888" containerName="keystone-bootstrap" Jan 26 17:16:13 crc kubenswrapper[4865]: I0126 17:16:13.603427 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="064a3a49-59a8-4160-97cb-c2c1ed797888" containerName="keystone-bootstrap" Jan 26 17:16:13 crc kubenswrapper[4865]: I0126 17:16:13.603593 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="064a3a49-59a8-4160-97cb-c2c1ed797888" containerName="keystone-bootstrap" Jan 26 17:16:13 crc kubenswrapper[4865]: I0126 17:16:13.604234 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/keystone-bootstrap-db774" Jan 26 17:16:13 crc kubenswrapper[4865]: I0126 17:16:13.606042 4865 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone-scripts" Jan 26 17:16:13 crc kubenswrapper[4865]: I0126 17:16:13.610842 4865 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"osp-secret" Jan 26 17:16:13 crc kubenswrapper[4865]: I0126 17:16:13.611135 4865 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone" Jan 26 17:16:13 crc kubenswrapper[4865]: I0126 17:16:13.611253 4865 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone-config-data" Jan 26 17:16:13 crc kubenswrapper[4865]: I0126 17:16:13.611313 4865 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone-keystone-dockercfg-skz8r" Jan 26 17:16:13 crc kubenswrapper[4865]: I0126 17:16:13.621699 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/keystone-bootstrap-db774"] Jan 26 17:16:13 crc kubenswrapper[4865]: I0126 17:16:13.661175 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/97f5260e-bc1b-4e6f-8bee-8c9054039c3b-fernet-keys\") pod \"keystone-bootstrap-db774\" (UID: \"97f5260e-bc1b-4e6f-8bee-8c9054039c3b\") " pod="nova-kuttl-default/keystone-bootstrap-db774" Jan 26 17:16:13 crc kubenswrapper[4865]: I0126 17:16:13.661253 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/97f5260e-bc1b-4e6f-8bee-8c9054039c3b-credential-keys\") pod \"keystone-bootstrap-db774\" (UID: \"97f5260e-bc1b-4e6f-8bee-8c9054039c3b\") " pod="nova-kuttl-default/keystone-bootstrap-db774" Jan 26 17:16:13 crc kubenswrapper[4865]: I0126 17:16:13.661547 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/97f5260e-bc1b-4e6f-8bee-8c9054039c3b-scripts\") pod \"keystone-bootstrap-db774\" (UID: \"97f5260e-bc1b-4e6f-8bee-8c9054039c3b\") " pod="nova-kuttl-default/keystone-bootstrap-db774" Jan 26 17:16:13 crc kubenswrapper[4865]: I0126 17:16:13.661638 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97f5260e-bc1b-4e6f-8bee-8c9054039c3b-combined-ca-bundle\") pod \"keystone-bootstrap-db774\" (UID: \"97f5260e-bc1b-4e6f-8bee-8c9054039c3b\") " pod="nova-kuttl-default/keystone-bootstrap-db774" Jan 26 17:16:13 crc kubenswrapper[4865]: I0126 17:16:13.661683 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97f5260e-bc1b-4e6f-8bee-8c9054039c3b-config-data\") pod \"keystone-bootstrap-db774\" (UID: \"97f5260e-bc1b-4e6f-8bee-8c9054039c3b\") " pod="nova-kuttl-default/keystone-bootstrap-db774" Jan 26 17:16:13 crc kubenswrapper[4865]: I0126 17:16:13.662024 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rxtt4\" (UniqueName: \"kubernetes.io/projected/97f5260e-bc1b-4e6f-8bee-8c9054039c3b-kube-api-access-rxtt4\") pod \"keystone-bootstrap-db774\" (UID: \"97f5260e-bc1b-4e6f-8bee-8c9054039c3b\") " pod="nova-kuttl-default/keystone-bootstrap-db774" Jan 26 17:16:13 
crc kubenswrapper[4865]: I0126 17:16:13.763339 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rxtt4\" (UniqueName: \"kubernetes.io/projected/97f5260e-bc1b-4e6f-8bee-8c9054039c3b-kube-api-access-rxtt4\") pod \"keystone-bootstrap-db774\" (UID: \"97f5260e-bc1b-4e6f-8bee-8c9054039c3b\") " pod="nova-kuttl-default/keystone-bootstrap-db774" Jan 26 17:16:13 crc kubenswrapper[4865]: I0126 17:16:13.763440 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/97f5260e-bc1b-4e6f-8bee-8c9054039c3b-fernet-keys\") pod \"keystone-bootstrap-db774\" (UID: \"97f5260e-bc1b-4e6f-8bee-8c9054039c3b\") " pod="nova-kuttl-default/keystone-bootstrap-db774" Jan 26 17:16:13 crc kubenswrapper[4865]: I0126 17:16:13.763487 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/97f5260e-bc1b-4e6f-8bee-8c9054039c3b-credential-keys\") pod \"keystone-bootstrap-db774\" (UID: \"97f5260e-bc1b-4e6f-8bee-8c9054039c3b\") " pod="nova-kuttl-default/keystone-bootstrap-db774" Jan 26 17:16:13 crc kubenswrapper[4865]: I0126 17:16:13.763639 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/97f5260e-bc1b-4e6f-8bee-8c9054039c3b-scripts\") pod \"keystone-bootstrap-db774\" (UID: \"97f5260e-bc1b-4e6f-8bee-8c9054039c3b\") " pod="nova-kuttl-default/keystone-bootstrap-db774" Jan 26 17:16:13 crc kubenswrapper[4865]: I0126 17:16:13.763676 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97f5260e-bc1b-4e6f-8bee-8c9054039c3b-combined-ca-bundle\") pod \"keystone-bootstrap-db774\" (UID: \"97f5260e-bc1b-4e6f-8bee-8c9054039c3b\") " pod="nova-kuttl-default/keystone-bootstrap-db774" Jan 26 17:16:13 crc kubenswrapper[4865]: I0126 17:16:13.763708 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97f5260e-bc1b-4e6f-8bee-8c9054039c3b-config-data\") pod \"keystone-bootstrap-db774\" (UID: \"97f5260e-bc1b-4e6f-8bee-8c9054039c3b\") " pod="nova-kuttl-default/keystone-bootstrap-db774" Jan 26 17:16:13 crc kubenswrapper[4865]: I0126 17:16:13.770092 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/97f5260e-bc1b-4e6f-8bee-8c9054039c3b-scripts\") pod \"keystone-bootstrap-db774\" (UID: \"97f5260e-bc1b-4e6f-8bee-8c9054039c3b\") " pod="nova-kuttl-default/keystone-bootstrap-db774" Jan 26 17:16:13 crc kubenswrapper[4865]: I0126 17:16:13.770487 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/97f5260e-bc1b-4e6f-8bee-8c9054039c3b-fernet-keys\") pod \"keystone-bootstrap-db774\" (UID: \"97f5260e-bc1b-4e6f-8bee-8c9054039c3b\") " pod="nova-kuttl-default/keystone-bootstrap-db774" Jan 26 17:16:13 crc kubenswrapper[4865]: I0126 17:16:13.771219 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97f5260e-bc1b-4e6f-8bee-8c9054039c3b-config-data\") pod \"keystone-bootstrap-db774\" (UID: \"97f5260e-bc1b-4e6f-8bee-8c9054039c3b\") " pod="nova-kuttl-default/keystone-bootstrap-db774" Jan 26 17:16:13 crc kubenswrapper[4865]: I0126 17:16:13.771501 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"credential-keys\" (UniqueName: \"kubernetes.io/secret/97f5260e-bc1b-4e6f-8bee-8c9054039c3b-credential-keys\") pod \"keystone-bootstrap-db774\" (UID: \"97f5260e-bc1b-4e6f-8bee-8c9054039c3b\") " pod="nova-kuttl-default/keystone-bootstrap-db774" Jan 26 17:16:13 crc kubenswrapper[4865]: I0126 17:16:13.798160 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97f5260e-bc1b-4e6f-8bee-8c9054039c3b-combined-ca-bundle\") pod \"keystone-bootstrap-db774\" (UID: \"97f5260e-bc1b-4e6f-8bee-8c9054039c3b\") " pod="nova-kuttl-default/keystone-bootstrap-db774" Jan 26 17:16:13 crc kubenswrapper[4865]: I0126 17:16:13.805504 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rxtt4\" (UniqueName: \"kubernetes.io/projected/97f5260e-bc1b-4e6f-8bee-8c9054039c3b-kube-api-access-rxtt4\") pod \"keystone-bootstrap-db774\" (UID: \"97f5260e-bc1b-4e6f-8bee-8c9054039c3b\") " pod="nova-kuttl-default/keystone-bootstrap-db774" Jan 26 17:16:13 crc kubenswrapper[4865]: I0126 17:16:13.923509 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/keystone-bootstrap-db774" Jan 26 17:16:14 crc kubenswrapper[4865]: I0126 17:16:14.373888 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="064a3a49-59a8-4160-97cb-c2c1ed797888" path="/var/lib/kubelet/pods/064a3a49-59a8-4160-97cb-c2c1ed797888/volumes" Jan 26 17:16:14 crc kubenswrapper[4865]: I0126 17:16:14.400153 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/keystone-bootstrap-db774"] Jan 26 17:16:14 crc kubenswrapper[4865]: I0126 17:16:14.441184 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-bootstrap-db774" event={"ID":"97f5260e-bc1b-4e6f-8bee-8c9054039c3b","Type":"ContainerStarted","Data":"325c6a9ab66dd0b00d60656e0417f6787e6bfec8cab70b084fc28a0dd1d6a36b"} Jan 26 17:16:16 crc kubenswrapper[4865]: I0126 17:16:16.461629 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-bootstrap-db774" event={"ID":"97f5260e-bc1b-4e6f-8bee-8c9054039c3b","Type":"ContainerStarted","Data":"87fd7d654074c8075d308136c6ca88119875f289d124506f455471751dcffcb7"} Jan 26 17:16:16 crc kubenswrapper[4865]: I0126 17:16:16.866230 4865 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/placement-db-sync-v8qx4" Jan 26 17:16:16 crc kubenswrapper[4865]: I0126 17:16:16.964162 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r7r5l\" (UniqueName: \"kubernetes.io/projected/d1754996-361d-43dd-bb98-c8f8cc9875e2-kube-api-access-r7r5l\") pod \"d1754996-361d-43dd-bb98-c8f8cc9875e2\" (UID: \"d1754996-361d-43dd-bb98-c8f8cc9875e2\") " Jan 26 17:16:16 crc kubenswrapper[4865]: I0126 17:16:16.964251 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1754996-361d-43dd-bb98-c8f8cc9875e2-combined-ca-bundle\") pod \"d1754996-361d-43dd-bb98-c8f8cc9875e2\" (UID: \"d1754996-361d-43dd-bb98-c8f8cc9875e2\") " Jan 26 17:16:16 crc kubenswrapper[4865]: I0126 17:16:16.964280 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d1754996-361d-43dd-bb98-c8f8cc9875e2-scripts\") pod \"d1754996-361d-43dd-bb98-c8f8cc9875e2\" (UID: \"d1754996-361d-43dd-bb98-c8f8cc9875e2\") " Jan 26 17:16:16 crc kubenswrapper[4865]: I0126 17:16:16.964337 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d1754996-361d-43dd-bb98-c8f8cc9875e2-logs\") pod \"d1754996-361d-43dd-bb98-c8f8cc9875e2\" (UID: \"d1754996-361d-43dd-bb98-c8f8cc9875e2\") " Jan 26 17:16:16 crc kubenswrapper[4865]: I0126 17:16:16.964415 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1754996-361d-43dd-bb98-c8f8cc9875e2-config-data\") pod \"d1754996-361d-43dd-bb98-c8f8cc9875e2\" (UID: \"d1754996-361d-43dd-bb98-c8f8cc9875e2\") " Jan 26 17:16:16 crc kubenswrapper[4865]: I0126 17:16:16.965251 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d1754996-361d-43dd-bb98-c8f8cc9875e2-logs" (OuterVolumeSpecName: "logs") pod "d1754996-361d-43dd-bb98-c8f8cc9875e2" (UID: "d1754996-361d-43dd-bb98-c8f8cc9875e2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 17:16:16 crc kubenswrapper[4865]: I0126 17:16:16.971667 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d1754996-361d-43dd-bb98-c8f8cc9875e2-kube-api-access-r7r5l" (OuterVolumeSpecName: "kube-api-access-r7r5l") pod "d1754996-361d-43dd-bb98-c8f8cc9875e2" (UID: "d1754996-361d-43dd-bb98-c8f8cc9875e2"). InnerVolumeSpecName "kube-api-access-r7r5l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 17:16:16 crc kubenswrapper[4865]: I0126 17:16:16.972419 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1754996-361d-43dd-bb98-c8f8cc9875e2-scripts" (OuterVolumeSpecName: "scripts") pod "d1754996-361d-43dd-bb98-c8f8cc9875e2" (UID: "d1754996-361d-43dd-bb98-c8f8cc9875e2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 17:16:16 crc kubenswrapper[4865]: I0126 17:16:16.992787 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1754996-361d-43dd-bb98-c8f8cc9875e2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d1754996-361d-43dd-bb98-c8f8cc9875e2" (UID: "d1754996-361d-43dd-bb98-c8f8cc9875e2"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 17:16:16 crc kubenswrapper[4865]: I0126 17:16:16.993039 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1754996-361d-43dd-bb98-c8f8cc9875e2-config-data" (OuterVolumeSpecName: "config-data") pod "d1754996-361d-43dd-bb98-c8f8cc9875e2" (UID: "d1754996-361d-43dd-bb98-c8f8cc9875e2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 17:16:17 crc kubenswrapper[4865]: I0126 17:16:17.069257 4865 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d1754996-361d-43dd-bb98-c8f8cc9875e2-logs\") on node \"crc\" DevicePath \"\"" Jan 26 17:16:17 crc kubenswrapper[4865]: I0126 17:16:17.069288 4865 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1754996-361d-43dd-bb98-c8f8cc9875e2-config-data\") on node \"crc\" DevicePath \"\"" Jan 26 17:16:17 crc kubenswrapper[4865]: I0126 17:16:17.069300 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r7r5l\" (UniqueName: \"kubernetes.io/projected/d1754996-361d-43dd-bb98-c8f8cc9875e2-kube-api-access-r7r5l\") on node \"crc\" DevicePath \"\"" Jan 26 17:16:17 crc kubenswrapper[4865]: I0126 17:16:17.069310 4865 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1754996-361d-43dd-bb98-c8f8cc9875e2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 26 17:16:17 crc kubenswrapper[4865]: I0126 17:16:17.069319 4865 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d1754996-361d-43dd-bb98-c8f8cc9875e2-scripts\") on node \"crc\" DevicePath \"\"" Jan 26 17:16:17 crc kubenswrapper[4865]: I0126 17:16:17.471599 4865 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/placement-db-sync-v8qx4" Jan 26 17:16:17 crc kubenswrapper[4865]: I0126 17:16:17.472671 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/placement-db-sync-v8qx4" event={"ID":"d1754996-361d-43dd-bb98-c8f8cc9875e2","Type":"ContainerDied","Data":"1ef5139a029dd8c3a73bb4cd85ae74ffb16784a5ad6bd9f2850f36040848f426"} Jan 26 17:16:17 crc kubenswrapper[4865]: I0126 17:16:17.472709 4865 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1ef5139a029dd8c3a73bb4cd85ae74ffb16784a5ad6bd9f2850f36040848f426" Jan 26 17:16:17 crc kubenswrapper[4865]: I0126 17:16:17.500843 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/keystone-bootstrap-db774" podStartSLOduration=4.500824101 podStartE2EDuration="4.500824101s" podCreationTimestamp="2026-01-26 17:16:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 17:16:17.492471943 +0000 UTC m=+1305.076357520" watchObservedRunningTime="2026-01-26 17:16:17.500824101 +0000 UTC m=+1305.084709688" Jan 26 17:16:17 crc kubenswrapper[4865]: I0126 17:16:17.967252 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/placement-75795794cb-9clbk"] Jan 26 17:16:17 crc kubenswrapper[4865]: E0126 17:16:17.967943 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1754996-361d-43dd-bb98-c8f8cc9875e2" containerName="placement-db-sync" Jan 26 17:16:17 crc kubenswrapper[4865]: I0126 17:16:17.967962 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1754996-361d-43dd-bb98-c8f8cc9875e2" containerName="placement-db-sync" Jan 26 17:16:17 crc kubenswrapper[4865]: I0126 17:16:17.968153 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1754996-361d-43dd-bb98-c8f8cc9875e2" containerName="placement-db-sync" Jan 26 17:16:17 crc kubenswrapper[4865]: I0126 17:16:17.969078 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/placement-75795794cb-9clbk" Jan 26 17:16:17 crc kubenswrapper[4865]: I0126 17:16:17.979667 4865 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"placement-config-data" Jan 26 17:16:17 crc kubenswrapper[4865]: I0126 17:16:17.979801 4865 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"placement-placement-dockercfg-k47nm" Jan 26 17:16:17 crc kubenswrapper[4865]: I0126 17:16:17.980309 4865 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"placement-scripts" Jan 26 17:16:17 crc kubenswrapper[4865]: I0126 17:16:17.991533 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/placement-75795794cb-9clbk"] Jan 26 17:16:18 crc kubenswrapper[4865]: I0126 17:16:18.087888 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8537d9ef-d3c2-4fb9-b955-0ef7983eb4ea-combined-ca-bundle\") pod \"placement-75795794cb-9clbk\" (UID: \"8537d9ef-d3c2-4fb9-b955-0ef7983eb4ea\") " pod="nova-kuttl-default/placement-75795794cb-9clbk" Jan 26 17:16:18 crc kubenswrapper[4865]: I0126 17:16:18.087940 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wnx97\" (UniqueName: \"kubernetes.io/projected/8537d9ef-d3c2-4fb9-b955-0ef7983eb4ea-kube-api-access-wnx97\") pod \"placement-75795794cb-9clbk\" (UID: \"8537d9ef-d3c2-4fb9-b955-0ef7983eb4ea\") " pod="nova-kuttl-default/placement-75795794cb-9clbk" Jan 26 17:16:18 crc kubenswrapper[4865]: I0126 17:16:18.087972 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8537d9ef-d3c2-4fb9-b955-0ef7983eb4ea-scripts\") pod \"placement-75795794cb-9clbk\" (UID: \"8537d9ef-d3c2-4fb9-b955-0ef7983eb4ea\") " pod="nova-kuttl-default/placement-75795794cb-9clbk" Jan 26 17:16:18 crc kubenswrapper[4865]: I0126 17:16:18.088006 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8537d9ef-d3c2-4fb9-b955-0ef7983eb4ea-config-data\") pod \"placement-75795794cb-9clbk\" (UID: \"8537d9ef-d3c2-4fb9-b955-0ef7983eb4ea\") " pod="nova-kuttl-default/placement-75795794cb-9clbk" Jan 26 17:16:18 crc kubenswrapper[4865]: I0126 17:16:18.088023 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8537d9ef-d3c2-4fb9-b955-0ef7983eb4ea-logs\") pod \"placement-75795794cb-9clbk\" (UID: \"8537d9ef-d3c2-4fb9-b955-0ef7983eb4ea\") " pod="nova-kuttl-default/placement-75795794cb-9clbk" Jan 26 17:16:18 crc kubenswrapper[4865]: I0126 17:16:18.189683 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wnx97\" (UniqueName: \"kubernetes.io/projected/8537d9ef-d3c2-4fb9-b955-0ef7983eb4ea-kube-api-access-wnx97\") pod \"placement-75795794cb-9clbk\" (UID: \"8537d9ef-d3c2-4fb9-b955-0ef7983eb4ea\") " pod="nova-kuttl-default/placement-75795794cb-9clbk" Jan 26 17:16:18 crc kubenswrapper[4865]: I0126 17:16:18.189777 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8537d9ef-d3c2-4fb9-b955-0ef7983eb4ea-scripts\") pod \"placement-75795794cb-9clbk\" (UID: \"8537d9ef-d3c2-4fb9-b955-0ef7983eb4ea\") " 
pod="nova-kuttl-default/placement-75795794cb-9clbk" Jan 26 17:16:18 crc kubenswrapper[4865]: I0126 17:16:18.189802 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8537d9ef-d3c2-4fb9-b955-0ef7983eb4ea-config-data\") pod \"placement-75795794cb-9clbk\" (UID: \"8537d9ef-d3c2-4fb9-b955-0ef7983eb4ea\") " pod="nova-kuttl-default/placement-75795794cb-9clbk" Jan 26 17:16:18 crc kubenswrapper[4865]: I0126 17:16:18.190685 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8537d9ef-d3c2-4fb9-b955-0ef7983eb4ea-logs\") pod \"placement-75795794cb-9clbk\" (UID: \"8537d9ef-d3c2-4fb9-b955-0ef7983eb4ea\") " pod="nova-kuttl-default/placement-75795794cb-9clbk" Jan 26 17:16:18 crc kubenswrapper[4865]: I0126 17:16:18.190800 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8537d9ef-d3c2-4fb9-b955-0ef7983eb4ea-combined-ca-bundle\") pod \"placement-75795794cb-9clbk\" (UID: \"8537d9ef-d3c2-4fb9-b955-0ef7983eb4ea\") " pod="nova-kuttl-default/placement-75795794cb-9clbk" Jan 26 17:16:18 crc kubenswrapper[4865]: I0126 17:16:18.191608 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8537d9ef-d3c2-4fb9-b955-0ef7983eb4ea-logs\") pod \"placement-75795794cb-9clbk\" (UID: \"8537d9ef-d3c2-4fb9-b955-0ef7983eb4ea\") " pod="nova-kuttl-default/placement-75795794cb-9clbk" Jan 26 17:16:18 crc kubenswrapper[4865]: I0126 17:16:18.194654 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8537d9ef-d3c2-4fb9-b955-0ef7983eb4ea-scripts\") pod \"placement-75795794cb-9clbk\" (UID: \"8537d9ef-d3c2-4fb9-b955-0ef7983eb4ea\") " pod="nova-kuttl-default/placement-75795794cb-9clbk" Jan 26 17:16:18 crc kubenswrapper[4865]: I0126 17:16:18.195284 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8537d9ef-d3c2-4fb9-b955-0ef7983eb4ea-combined-ca-bundle\") pod \"placement-75795794cb-9clbk\" (UID: \"8537d9ef-d3c2-4fb9-b955-0ef7983eb4ea\") " pod="nova-kuttl-default/placement-75795794cb-9clbk" Jan 26 17:16:18 crc kubenswrapper[4865]: I0126 17:16:18.212494 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8537d9ef-d3c2-4fb9-b955-0ef7983eb4ea-config-data\") pod \"placement-75795794cb-9clbk\" (UID: \"8537d9ef-d3c2-4fb9-b955-0ef7983eb4ea\") " pod="nova-kuttl-default/placement-75795794cb-9clbk" Jan 26 17:16:18 crc kubenswrapper[4865]: I0126 17:16:18.240294 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wnx97\" (UniqueName: \"kubernetes.io/projected/8537d9ef-d3c2-4fb9-b955-0ef7983eb4ea-kube-api-access-wnx97\") pod \"placement-75795794cb-9clbk\" (UID: \"8537d9ef-d3c2-4fb9-b955-0ef7983eb4ea\") " pod="nova-kuttl-default/placement-75795794cb-9clbk" Jan 26 17:16:18 crc kubenswrapper[4865]: I0126 17:16:18.292394 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/placement-75795794cb-9clbk" Jan 26 17:16:18 crc kubenswrapper[4865]: I0126 17:16:18.769340 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/placement-75795794cb-9clbk"] Jan 26 17:16:18 crc kubenswrapper[4865]: W0126 17:16:18.774296 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8537d9ef_d3c2_4fb9_b955_0ef7983eb4ea.slice/crio-9660817cbd5f9a11f2586a0223600b6abce00376d6d001370827516bf3ae8639 WatchSource:0}: Error finding container 9660817cbd5f9a11f2586a0223600b6abce00376d6d001370827516bf3ae8639: Status 404 returned error can't find the container with id 9660817cbd5f9a11f2586a0223600b6abce00376d6d001370827516bf3ae8639 Jan 26 17:16:19 crc kubenswrapper[4865]: I0126 17:16:19.496153 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/placement-75795794cb-9clbk" event={"ID":"8537d9ef-d3c2-4fb9-b955-0ef7983eb4ea","Type":"ContainerStarted","Data":"4a513596d65c247d8970fad4c3bacdd949ba4c910ecb9cc6ff32b900c95552a5"} Jan 26 17:16:19 crc kubenswrapper[4865]: I0126 17:16:19.497895 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/placement-75795794cb-9clbk" Jan 26 17:16:19 crc kubenswrapper[4865]: I0126 17:16:19.497909 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/placement-75795794cb-9clbk" event={"ID":"8537d9ef-d3c2-4fb9-b955-0ef7983eb4ea","Type":"ContainerStarted","Data":"1cb6afc32e81138e9e650dc0d4d70574a4d2a70febb5a2f55649d52850b0d2c0"} Jan 26 17:16:19 crc kubenswrapper[4865]: I0126 17:16:19.497950 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/placement-75795794cb-9clbk" Jan 26 17:16:19 crc kubenswrapper[4865]: I0126 17:16:19.497961 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/placement-75795794cb-9clbk" event={"ID":"8537d9ef-d3c2-4fb9-b955-0ef7983eb4ea","Type":"ContainerStarted","Data":"9660817cbd5f9a11f2586a0223600b6abce00376d6d001370827516bf3ae8639"} Jan 26 17:16:19 crc kubenswrapper[4865]: I0126 17:16:19.528670 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/placement-75795794cb-9clbk" podStartSLOduration=2.528627693 podStartE2EDuration="2.528627693s" podCreationTimestamp="2026-01-26 17:16:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 17:16:19.514357787 +0000 UTC m=+1307.098243374" watchObservedRunningTime="2026-01-26 17:16:19.528627693 +0000 UTC m=+1307.112513280" Jan 26 17:16:20 crc kubenswrapper[4865]: I0126 17:16:20.509145 4865 generic.go:334] "Generic (PLEG): container finished" podID="97f5260e-bc1b-4e6f-8bee-8c9054039c3b" containerID="87fd7d654074c8075d308136c6ca88119875f289d124506f455471751dcffcb7" exitCode=0 Jan 26 17:16:20 crc kubenswrapper[4865]: I0126 17:16:20.509237 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-bootstrap-db774" event={"ID":"97f5260e-bc1b-4e6f-8bee-8c9054039c3b","Type":"ContainerDied","Data":"87fd7d654074c8075d308136c6ca88119875f289d124506f455471751dcffcb7"} Jan 26 17:16:21 crc kubenswrapper[4865]: I0126 17:16:21.962937 4865 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/keystone-bootstrap-db774" Jan 26 17:16:22 crc kubenswrapper[4865]: I0126 17:16:22.086373 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97f5260e-bc1b-4e6f-8bee-8c9054039c3b-config-data\") pod \"97f5260e-bc1b-4e6f-8bee-8c9054039c3b\" (UID: \"97f5260e-bc1b-4e6f-8bee-8c9054039c3b\") " Jan 26 17:16:22 crc kubenswrapper[4865]: I0126 17:16:22.086613 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/97f5260e-bc1b-4e6f-8bee-8c9054039c3b-scripts\") pod \"97f5260e-bc1b-4e6f-8bee-8c9054039c3b\" (UID: \"97f5260e-bc1b-4e6f-8bee-8c9054039c3b\") " Jan 26 17:16:22 crc kubenswrapper[4865]: I0126 17:16:22.086659 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/97f5260e-bc1b-4e6f-8bee-8c9054039c3b-fernet-keys\") pod \"97f5260e-bc1b-4e6f-8bee-8c9054039c3b\" (UID: \"97f5260e-bc1b-4e6f-8bee-8c9054039c3b\") " Jan 26 17:16:22 crc kubenswrapper[4865]: I0126 17:16:22.086708 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rxtt4\" (UniqueName: \"kubernetes.io/projected/97f5260e-bc1b-4e6f-8bee-8c9054039c3b-kube-api-access-rxtt4\") pod \"97f5260e-bc1b-4e6f-8bee-8c9054039c3b\" (UID: \"97f5260e-bc1b-4e6f-8bee-8c9054039c3b\") " Jan 26 17:16:22 crc kubenswrapper[4865]: I0126 17:16:22.086840 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97f5260e-bc1b-4e6f-8bee-8c9054039c3b-combined-ca-bundle\") pod \"97f5260e-bc1b-4e6f-8bee-8c9054039c3b\" (UID: \"97f5260e-bc1b-4e6f-8bee-8c9054039c3b\") " Jan 26 17:16:22 crc kubenswrapper[4865]: I0126 17:16:22.086879 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/97f5260e-bc1b-4e6f-8bee-8c9054039c3b-credential-keys\") pod \"97f5260e-bc1b-4e6f-8bee-8c9054039c3b\" (UID: \"97f5260e-bc1b-4e6f-8bee-8c9054039c3b\") " Jan 26 17:16:22 crc kubenswrapper[4865]: I0126 17:16:22.092570 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97f5260e-bc1b-4e6f-8bee-8c9054039c3b-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "97f5260e-bc1b-4e6f-8bee-8c9054039c3b" (UID: "97f5260e-bc1b-4e6f-8bee-8c9054039c3b"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 17:16:22 crc kubenswrapper[4865]: I0126 17:16:22.092983 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97f5260e-bc1b-4e6f-8bee-8c9054039c3b-scripts" (OuterVolumeSpecName: "scripts") pod "97f5260e-bc1b-4e6f-8bee-8c9054039c3b" (UID: "97f5260e-bc1b-4e6f-8bee-8c9054039c3b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 17:16:22 crc kubenswrapper[4865]: I0126 17:16:22.093044 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/97f5260e-bc1b-4e6f-8bee-8c9054039c3b-kube-api-access-rxtt4" (OuterVolumeSpecName: "kube-api-access-rxtt4") pod "97f5260e-bc1b-4e6f-8bee-8c9054039c3b" (UID: "97f5260e-bc1b-4e6f-8bee-8c9054039c3b"). InnerVolumeSpecName "kube-api-access-rxtt4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 17:16:22 crc kubenswrapper[4865]: I0126 17:16:22.093311 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97f5260e-bc1b-4e6f-8bee-8c9054039c3b-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "97f5260e-bc1b-4e6f-8bee-8c9054039c3b" (UID: "97f5260e-bc1b-4e6f-8bee-8c9054039c3b"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 17:16:22 crc kubenswrapper[4865]: I0126 17:16:22.112182 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97f5260e-bc1b-4e6f-8bee-8c9054039c3b-config-data" (OuterVolumeSpecName: "config-data") pod "97f5260e-bc1b-4e6f-8bee-8c9054039c3b" (UID: "97f5260e-bc1b-4e6f-8bee-8c9054039c3b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 17:16:22 crc kubenswrapper[4865]: I0126 17:16:22.125187 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97f5260e-bc1b-4e6f-8bee-8c9054039c3b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "97f5260e-bc1b-4e6f-8bee-8c9054039c3b" (UID: "97f5260e-bc1b-4e6f-8bee-8c9054039c3b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 17:16:22 crc kubenswrapper[4865]: I0126 17:16:22.188321 4865 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/97f5260e-bc1b-4e6f-8bee-8c9054039c3b-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 26 17:16:22 crc kubenswrapper[4865]: I0126 17:16:22.188366 4865 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97f5260e-bc1b-4e6f-8bee-8c9054039c3b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 26 17:16:22 crc kubenswrapper[4865]: I0126 17:16:22.188376 4865 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97f5260e-bc1b-4e6f-8bee-8c9054039c3b-config-data\") on node \"crc\" DevicePath \"\"" Jan 26 17:16:22 crc kubenswrapper[4865]: I0126 17:16:22.188384 4865 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/97f5260e-bc1b-4e6f-8bee-8c9054039c3b-scripts\") on node \"crc\" DevicePath \"\"" Jan 26 17:16:22 crc kubenswrapper[4865]: I0126 17:16:22.188396 4865 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/97f5260e-bc1b-4e6f-8bee-8c9054039c3b-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 26 17:16:22 crc kubenswrapper[4865]: I0126 17:16:22.188405 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rxtt4\" (UniqueName: \"kubernetes.io/projected/97f5260e-bc1b-4e6f-8bee-8c9054039c3b-kube-api-access-rxtt4\") on node \"crc\" DevicePath \"\"" Jan 26 17:16:22 crc kubenswrapper[4865]: I0126 17:16:22.527107 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-bootstrap-db774" event={"ID":"97f5260e-bc1b-4e6f-8bee-8c9054039c3b","Type":"ContainerDied","Data":"325c6a9ab66dd0b00d60656e0417f6787e6bfec8cab70b084fc28a0dd1d6a36b"} Jan 26 17:16:22 crc kubenswrapper[4865]: I0126 17:16:22.527149 4865 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="325c6a9ab66dd0b00d60656e0417f6787e6bfec8cab70b084fc28a0dd1d6a36b" Jan 26 17:16:22 crc kubenswrapper[4865]: I0126 17:16:22.527221 4865 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/keystone-bootstrap-db774" Jan 26 17:16:22 crc kubenswrapper[4865]: I0126 17:16:22.732931 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/keystone-5c779875cf-grrzh"] Jan 26 17:16:22 crc kubenswrapper[4865]: E0126 17:16:22.733809 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97f5260e-bc1b-4e6f-8bee-8c9054039c3b" containerName="keystone-bootstrap" Jan 26 17:16:22 crc kubenswrapper[4865]: I0126 17:16:22.733833 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="97f5260e-bc1b-4e6f-8bee-8c9054039c3b" containerName="keystone-bootstrap" Jan 26 17:16:22 crc kubenswrapper[4865]: I0126 17:16:22.734092 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="97f5260e-bc1b-4e6f-8bee-8c9054039c3b" containerName="keystone-bootstrap" Jan 26 17:16:22 crc kubenswrapper[4865]: I0126 17:16:22.734717 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/keystone-5c779875cf-grrzh" Jan 26 17:16:22 crc kubenswrapper[4865]: I0126 17:16:22.736723 4865 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone-scripts" Jan 26 17:16:22 crc kubenswrapper[4865]: I0126 17:16:22.736736 4865 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone-config-data" Jan 26 17:16:22 crc kubenswrapper[4865]: I0126 17:16:22.737144 4865 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone" Jan 26 17:16:22 crc kubenswrapper[4865]: I0126 17:16:22.737263 4865 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"keystone-keystone-dockercfg-skz8r" Jan 26 17:16:22 crc kubenswrapper[4865]: I0126 17:16:22.748916 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/keystone-5c779875cf-grrzh"] Jan 26 17:16:22 crc kubenswrapper[4865]: I0126 17:16:22.900302 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1441027-296e-4463-ac94-7fa503245019-combined-ca-bundle\") pod \"keystone-5c779875cf-grrzh\" (UID: \"b1441027-296e-4463-ac94-7fa503245019\") " pod="nova-kuttl-default/keystone-5c779875cf-grrzh" Jan 26 17:16:22 crc kubenswrapper[4865]: I0126 17:16:22.900433 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1441027-296e-4463-ac94-7fa503245019-scripts\") pod \"keystone-5c779875cf-grrzh\" (UID: \"b1441027-296e-4463-ac94-7fa503245019\") " pod="nova-kuttl-default/keystone-5c779875cf-grrzh" Jan 26 17:16:22 crc kubenswrapper[4865]: I0126 17:16:22.900461 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5pln2\" (UniqueName: \"kubernetes.io/projected/b1441027-296e-4463-ac94-7fa503245019-kube-api-access-5pln2\") pod \"keystone-5c779875cf-grrzh\" (UID: \"b1441027-296e-4463-ac94-7fa503245019\") " pod="nova-kuttl-default/keystone-5c779875cf-grrzh" Jan 26 17:16:22 crc kubenswrapper[4865]: I0126 17:16:22.900503 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b1441027-296e-4463-ac94-7fa503245019-fernet-keys\") pod \"keystone-5c779875cf-grrzh\" (UID: \"b1441027-296e-4463-ac94-7fa503245019\") " 
pod="nova-kuttl-default/keystone-5c779875cf-grrzh" Jan 26 17:16:22 crc kubenswrapper[4865]: I0126 17:16:22.900568 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1441027-296e-4463-ac94-7fa503245019-config-data\") pod \"keystone-5c779875cf-grrzh\" (UID: \"b1441027-296e-4463-ac94-7fa503245019\") " pod="nova-kuttl-default/keystone-5c779875cf-grrzh" Jan 26 17:16:22 crc kubenswrapper[4865]: I0126 17:16:22.900728 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b1441027-296e-4463-ac94-7fa503245019-credential-keys\") pod \"keystone-5c779875cf-grrzh\" (UID: \"b1441027-296e-4463-ac94-7fa503245019\") " pod="nova-kuttl-default/keystone-5c779875cf-grrzh" Jan 26 17:16:23 crc kubenswrapper[4865]: I0126 17:16:23.002127 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b1441027-296e-4463-ac94-7fa503245019-credential-keys\") pod \"keystone-5c779875cf-grrzh\" (UID: \"b1441027-296e-4463-ac94-7fa503245019\") " pod="nova-kuttl-default/keystone-5c779875cf-grrzh" Jan 26 17:16:23 crc kubenswrapper[4865]: I0126 17:16:23.002245 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1441027-296e-4463-ac94-7fa503245019-combined-ca-bundle\") pod \"keystone-5c779875cf-grrzh\" (UID: \"b1441027-296e-4463-ac94-7fa503245019\") " pod="nova-kuttl-default/keystone-5c779875cf-grrzh" Jan 26 17:16:23 crc kubenswrapper[4865]: I0126 17:16:23.002294 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5pln2\" (UniqueName: \"kubernetes.io/projected/b1441027-296e-4463-ac94-7fa503245019-kube-api-access-5pln2\") pod \"keystone-5c779875cf-grrzh\" (UID: \"b1441027-296e-4463-ac94-7fa503245019\") " pod="nova-kuttl-default/keystone-5c779875cf-grrzh" Jan 26 17:16:23 crc kubenswrapper[4865]: I0126 17:16:23.002325 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1441027-296e-4463-ac94-7fa503245019-scripts\") pod \"keystone-5c779875cf-grrzh\" (UID: \"b1441027-296e-4463-ac94-7fa503245019\") " pod="nova-kuttl-default/keystone-5c779875cf-grrzh" Jan 26 17:16:23 crc kubenswrapper[4865]: I0126 17:16:23.002734 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b1441027-296e-4463-ac94-7fa503245019-fernet-keys\") pod \"keystone-5c779875cf-grrzh\" (UID: \"b1441027-296e-4463-ac94-7fa503245019\") " pod="nova-kuttl-default/keystone-5c779875cf-grrzh" Jan 26 17:16:23 crc kubenswrapper[4865]: I0126 17:16:23.002767 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1441027-296e-4463-ac94-7fa503245019-config-data\") pod \"keystone-5c779875cf-grrzh\" (UID: \"b1441027-296e-4463-ac94-7fa503245019\") " pod="nova-kuttl-default/keystone-5c779875cf-grrzh" Jan 26 17:16:23 crc kubenswrapper[4865]: I0126 17:16:23.006975 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1441027-296e-4463-ac94-7fa503245019-combined-ca-bundle\") pod \"keystone-5c779875cf-grrzh\" (UID: \"b1441027-296e-4463-ac94-7fa503245019\") " 
pod="nova-kuttl-default/keystone-5c779875cf-grrzh" Jan 26 17:16:23 crc kubenswrapper[4865]: I0126 17:16:23.007538 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b1441027-296e-4463-ac94-7fa503245019-credential-keys\") pod \"keystone-5c779875cf-grrzh\" (UID: \"b1441027-296e-4463-ac94-7fa503245019\") " pod="nova-kuttl-default/keystone-5c779875cf-grrzh" Jan 26 17:16:23 crc kubenswrapper[4865]: I0126 17:16:23.008545 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1441027-296e-4463-ac94-7fa503245019-scripts\") pod \"keystone-5c779875cf-grrzh\" (UID: \"b1441027-296e-4463-ac94-7fa503245019\") " pod="nova-kuttl-default/keystone-5c779875cf-grrzh" Jan 26 17:16:23 crc kubenswrapper[4865]: I0126 17:16:23.014886 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1441027-296e-4463-ac94-7fa503245019-config-data\") pod \"keystone-5c779875cf-grrzh\" (UID: \"b1441027-296e-4463-ac94-7fa503245019\") " pod="nova-kuttl-default/keystone-5c779875cf-grrzh" Jan 26 17:16:23 crc kubenswrapper[4865]: I0126 17:16:23.016421 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b1441027-296e-4463-ac94-7fa503245019-fernet-keys\") pod \"keystone-5c779875cf-grrzh\" (UID: \"b1441027-296e-4463-ac94-7fa503245019\") " pod="nova-kuttl-default/keystone-5c779875cf-grrzh" Jan 26 17:16:23 crc kubenswrapper[4865]: I0126 17:16:23.018894 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5pln2\" (UniqueName: \"kubernetes.io/projected/b1441027-296e-4463-ac94-7fa503245019-kube-api-access-5pln2\") pod \"keystone-5c779875cf-grrzh\" (UID: \"b1441027-296e-4463-ac94-7fa503245019\") " pod="nova-kuttl-default/keystone-5c779875cf-grrzh" Jan 26 17:16:23 crc kubenswrapper[4865]: I0126 17:16:23.060955 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="nova-kuttl-default/keystone-5c779875cf-grrzh"
Jan 26 17:16:23 crc kubenswrapper[4865]: I0126 17:16:23.542293 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/keystone-5c779875cf-grrzh"]
Jan 26 17:16:23 crc kubenswrapper[4865]: W0126 17:16:23.544229 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb1441027_296e_4463_ac94_7fa503245019.slice/crio-f9a063be75cb22e08cee9a10d08eaf7e11bc55f579eec99c12c725754c58a86b WatchSource:0}: Error finding container f9a063be75cb22e08cee9a10d08eaf7e11bc55f579eec99c12c725754c58a86b: Status 404 returned error can't find the container with id f9a063be75cb22e08cee9a10d08eaf7e11bc55f579eec99c12c725754c58a86b
Jan 26 17:16:23 crc kubenswrapper[4865]: E0126 17:16:23.574293 4865 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod13ed1e3f_65dc_47e3_a8bc_c33f37660f23.slice/crio-conmon-b22ee16e9db269e990fc1d7298511afac3caea971dc21faadd5373de6c4fb98b.scope\": RecentStats: unable to find data in memory cache]"
Jan 26 17:16:24 crc kubenswrapper[4865]: I0126 17:16:24.541169 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-5c779875cf-grrzh" event={"ID":"b1441027-296e-4463-ac94-7fa503245019","Type":"ContainerStarted","Data":"19cb28c66b9b03e83f7f2b63611139b803c8fbda528292fdfb7aa60e0f4336a4"}
Jan 26 17:16:24 crc kubenswrapper[4865]: I0126 17:16:24.541698 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/keystone-5c779875cf-grrzh" event={"ID":"b1441027-296e-4463-ac94-7fa503245019","Type":"ContainerStarted","Data":"f9a063be75cb22e08cee9a10d08eaf7e11bc55f579eec99c12c725754c58a86b"}
Jan 26 17:16:25 crc kubenswrapper[4865]: I0126 17:16:25.550045 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="nova-kuttl-default/keystone-5c779875cf-grrzh"
Jan 26 17:16:25 crc kubenswrapper[4865]: I0126 17:16:25.573447 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/keystone-5c779875cf-grrzh" podStartSLOduration=3.57341277 podStartE2EDuration="3.57341277s" podCreationTimestamp="2026-01-26 17:16:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 17:16:25.570565099 +0000 UTC m=+1313.154450686" watchObservedRunningTime="2026-01-26 17:16:25.57341277 +0000 UTC m=+1313.157298347"
Jan 26 17:16:33 crc kubenswrapper[4865]: E0126 17:16:33.754010 4865 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod13ed1e3f_65dc_47e3_a8bc_c33f37660f23.slice/crio-conmon-b22ee16e9db269e990fc1d7298511afac3caea971dc21faadd5373de6c4fb98b.scope\": RecentStats: unable to find data in memory cache]"
Jan 26 17:16:49 crc kubenswrapper[4865]: I0126 17:16:49.366749 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/placement-75795794cb-9clbk"
Jan 26 17:16:49 crc kubenswrapper[4865]: I0126 17:16:49.369716 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/placement-75795794cb-9clbk"
Jan 26 17:16:54 crc kubenswrapper[4865]: I0126 17:16:54.596477 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="nova-kuttl-default/keystone-5c779875cf-grrzh"
Jan 26 17:16:57 crc kubenswrapper[4865]: I0126 17:16:57.575213 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/openstackclient"]
Jan 26 17:16:57 crc kubenswrapper[4865]: I0126 17:16:57.578650 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/openstackclient"
Jan 26 17:16:57 crc kubenswrapper[4865]: I0126 17:16:57.580988 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"nova-kuttl-default"/"openstack-config"
Jan 26 17:16:57 crc kubenswrapper[4865]: I0126 17:16:57.581267 4865 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"openstack-config-secret"
Jan 26 17:16:57 crc kubenswrapper[4865]: I0126 17:16:57.581752 4865 reflector.go:368] Caches populated for *v1.Secret from object-"nova-kuttl-default"/"openstackclient-openstackclient-dockercfg-ph5ll"
Jan 26 17:16:57 crc kubenswrapper[4865]: I0126 17:16:57.586703 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/openstackclient"]
Jan 26 17:16:57 crc kubenswrapper[4865]: I0126 17:16:57.704463 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/d410d241-c229-4630-a617-3fbb24a4cb60-openstack-config\") pod \"openstackclient\" (UID: \"d410d241-c229-4630-a617-3fbb24a4cb60\") " pod="nova-kuttl-default/openstackclient"
Jan 26 17:16:57 crc kubenswrapper[4865]: I0126 17:16:57.704803 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/d410d241-c229-4630-a617-3fbb24a4cb60-openstack-config-secret\") pod \"openstackclient\" (UID: \"d410d241-c229-4630-a617-3fbb24a4cb60\") " pod="nova-kuttl-default/openstackclient"
Jan 26 17:16:57 crc kubenswrapper[4865]: I0126 17:16:57.704938 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-24s8q\" (UniqueName: \"kubernetes.io/projected/d410d241-c229-4630-a617-3fbb24a4cb60-kube-api-access-24s8q\") pod \"openstackclient\" (UID: \"d410d241-c229-4630-a617-3fbb24a4cb60\") " pod="nova-kuttl-default/openstackclient"
Jan 26 17:16:57 crc kubenswrapper[4865]: I0126 17:16:57.705091 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d410d241-c229-4630-a617-3fbb24a4cb60-combined-ca-bundle\") pod \"openstackclient\" (UID: \"d410d241-c229-4630-a617-3fbb24a4cb60\") " pod="nova-kuttl-default/openstackclient"
Jan 26 17:16:57 crc kubenswrapper[4865]: I0126 17:16:57.718197 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/openstackclient"]
Jan 26 17:16:57 crc kubenswrapper[4865]: E0126 17:16:57.719176 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle kube-api-access-24s8q openstack-config openstack-config-secret], unattached volumes=[], failed to process volumes=[]: context canceled" pod="nova-kuttl-default/openstackclient" podUID="d410d241-c229-4630-a617-3fbb24a4cb60"
Jan 26 17:16:57 crc kubenswrapper[4865]: I0126 17:16:57.724825 4865 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/openstackclient"]
Jan 26 17:16:57 crc kubenswrapper[4865]: I0126 17:16:57.746812 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["nova-kuttl-default/openstackclient"]
Jan 26 17:16:57 crc kubenswrapper[4865]: I0126 17:16:57.747785 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/openstackclient"
Jan 26 17:16:57 crc kubenswrapper[4865]: I0126 17:16:57.761961 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/openstackclient"]
Jan 26 17:16:57 crc kubenswrapper[4865]: I0126 17:16:57.808643 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/d410d241-c229-4630-a617-3fbb24a4cb60-openstack-config\") pod \"openstackclient\" (UID: \"d410d241-c229-4630-a617-3fbb24a4cb60\") " pod="nova-kuttl-default/openstackclient"
Jan 26 17:16:57 crc kubenswrapper[4865]: I0126 17:16:57.808775 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/d410d241-c229-4630-a617-3fbb24a4cb60-openstack-config-secret\") pod \"openstackclient\" (UID: \"d410d241-c229-4630-a617-3fbb24a4cb60\") " pod="nova-kuttl-default/openstackclient"
Jan 26 17:16:57 crc kubenswrapper[4865]: I0126 17:16:57.808809 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-24s8q\" (UniqueName: \"kubernetes.io/projected/d410d241-c229-4630-a617-3fbb24a4cb60-kube-api-access-24s8q\") pod \"openstackclient\" (UID: \"d410d241-c229-4630-a617-3fbb24a4cb60\") " pod="nova-kuttl-default/openstackclient"
Jan 26 17:16:57 crc kubenswrapper[4865]: I0126 17:16:57.808841 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d410d241-c229-4630-a617-3fbb24a4cb60-combined-ca-bundle\") pod \"openstackclient\" (UID: \"d410d241-c229-4630-a617-3fbb24a4cb60\") " pod="nova-kuttl-default/openstackclient"
Jan 26 17:16:57 crc kubenswrapper[4865]: I0126 17:16:57.809664 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/d410d241-c229-4630-a617-3fbb24a4cb60-openstack-config\") pod \"openstackclient\" (UID: \"d410d241-c229-4630-a617-3fbb24a4cb60\") " pod="nova-kuttl-default/openstackclient"
Jan 26 17:16:57 crc kubenswrapper[4865]: E0126 17:16:57.811245 4865 projected.go:194] Error preparing data for projected volume kube-api-access-24s8q for pod nova-kuttl-default/openstackclient: failed to fetch token: serviceaccounts "openstackclient-openstackclient" is forbidden: the UID in the bound object reference (d410d241-c229-4630-a617-3fbb24a4cb60) does not match the UID in record. The object might have been deleted and then recreated
Jan 26 17:16:57 crc kubenswrapper[4865]: E0126 17:16:57.811404 4865 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/d410d241-c229-4630-a617-3fbb24a4cb60-kube-api-access-24s8q podName:d410d241-c229-4630-a617-3fbb24a4cb60 nodeName:}" failed. No retries permitted until 2026-01-26 17:16:58.311378052 +0000 UTC m=+1345.895263639 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-24s8q" (UniqueName: "kubernetes.io/projected/d410d241-c229-4630-a617-3fbb24a4cb60-kube-api-access-24s8q") pod "openstackclient" (UID: "d410d241-c229-4630-a617-3fbb24a4cb60") : failed to fetch token: serviceaccounts "openstackclient-openstackclient" is forbidden: the UID in the bound object reference (d410d241-c229-4630-a617-3fbb24a4cb60) does not match the UID in record. The object might have been deleted and then recreated
Jan 26 17:16:57 crc kubenswrapper[4865]: I0126 17:16:57.818468 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/d410d241-c229-4630-a617-3fbb24a4cb60-openstack-config-secret\") pod \"openstackclient\" (UID: \"d410d241-c229-4630-a617-3fbb24a4cb60\") " pod="nova-kuttl-default/openstackclient"
Jan 26 17:16:57 crc kubenswrapper[4865]: I0126 17:16:57.820827 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/openstackclient"
Jan 26 17:16:57 crc kubenswrapper[4865]: I0126 17:16:57.824807 4865 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="nova-kuttl-default/openstackclient" oldPodUID="d410d241-c229-4630-a617-3fbb24a4cb60" podUID="2474af4c-b4f1-4ff1-9ffb-6e26f8f6c9a0"
Jan 26 17:16:57 crc kubenswrapper[4865]: I0126 17:16:57.828212 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d410d241-c229-4630-a617-3fbb24a4cb60-combined-ca-bundle\") pod \"openstackclient\" (UID: \"d410d241-c229-4630-a617-3fbb24a4cb60\") " pod="nova-kuttl-default/openstackclient"
Jan 26 17:16:57 crc kubenswrapper[4865]: I0126 17:16:57.885150 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/openstackclient"
Jan 26 17:16:57 crc kubenswrapper[4865]: I0126 17:16:57.910730 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/2474af4c-b4f1-4ff1-9ffb-6e26f8f6c9a0-openstack-config-secret\") pod \"openstackclient\" (UID: \"2474af4c-b4f1-4ff1-9ffb-6e26f8f6c9a0\") " pod="nova-kuttl-default/openstackclient"
Jan 26 17:16:57 crc kubenswrapper[4865]: I0126 17:16:57.910811 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qds84\" (UniqueName: \"kubernetes.io/projected/2474af4c-b4f1-4ff1-9ffb-6e26f8f6c9a0-kube-api-access-qds84\") pod \"openstackclient\" (UID: \"2474af4c-b4f1-4ff1-9ffb-6e26f8f6c9a0\") " pod="nova-kuttl-default/openstackclient"
Jan 26 17:16:57 crc kubenswrapper[4865]: I0126 17:16:57.910840 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/2474af4c-b4f1-4ff1-9ffb-6e26f8f6c9a0-openstack-config\") pod \"openstackclient\" (UID: \"2474af4c-b4f1-4ff1-9ffb-6e26f8f6c9a0\") " pod="nova-kuttl-default/openstackclient"
Jan 26 17:16:57 crc kubenswrapper[4865]: I0126 17:16:57.910940 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2474af4c-b4f1-4ff1-9ffb-6e26f8f6c9a0-combined-ca-bundle\") pod \"openstackclient\" (UID: \"2474af4c-b4f1-4ff1-9ffb-6e26f8f6c9a0\") " pod="nova-kuttl-default/openstackclient"
Jan 26 17:16:57 crc kubenswrapper[4865]: I0126 17:16:57.911031 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-24s8q\" (UniqueName: \"kubernetes.io/projected/d410d241-c229-4630-a617-3fbb24a4cb60-kube-api-access-24s8q\") on node \"crc\" DevicePath \"\""
Jan 26 17:16:58 crc kubenswrapper[4865]: I0126 17:16:58.052941 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d410d241-c229-4630-a617-3fbb24a4cb60-combined-ca-bundle\") pod \"d410d241-c229-4630-a617-3fbb24a4cb60\" (UID: \"d410d241-c229-4630-a617-3fbb24a4cb60\") "
Jan 26 17:16:58 crc kubenswrapper[4865]: I0126 17:16:58.053061 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/d410d241-c229-4630-a617-3fbb24a4cb60-openstack-config\") pod \"d410d241-c229-4630-a617-3fbb24a4cb60\" (UID: \"d410d241-c229-4630-a617-3fbb24a4cb60\") "
Jan 26 17:16:58 crc kubenswrapper[4865]: I0126 17:16:58.053929 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d410d241-c229-4630-a617-3fbb24a4cb60-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "d410d241-c229-4630-a617-3fbb24a4cb60" (UID: "d410d241-c229-4630-a617-3fbb24a4cb60"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Jan 26 17:16:58 crc kubenswrapper[4865]: I0126 17:16:58.054515 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/d410d241-c229-4630-a617-3fbb24a4cb60-openstack-config-secret\") pod \"d410d241-c229-4630-a617-3fbb24a4cb60\" (UID: \"d410d241-c229-4630-a617-3fbb24a4cb60\") "
Jan 26 17:16:58 crc kubenswrapper[4865]: I0126 17:16:58.055469 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2474af4c-b4f1-4ff1-9ffb-6e26f8f6c9a0-combined-ca-bundle\") pod \"openstackclient\" (UID: \"2474af4c-b4f1-4ff1-9ffb-6e26f8f6c9a0\") " pod="nova-kuttl-default/openstackclient"
Jan 26 17:16:58 crc kubenswrapper[4865]: I0126 17:16:58.055686 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/2474af4c-b4f1-4ff1-9ffb-6e26f8f6c9a0-openstack-config-secret\") pod \"openstackclient\" (UID: \"2474af4c-b4f1-4ff1-9ffb-6e26f8f6c9a0\") " pod="nova-kuttl-default/openstackclient"
Jan 26 17:16:58 crc kubenswrapper[4865]: I0126 17:16:58.055715 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qds84\" (UniqueName: \"kubernetes.io/projected/2474af4c-b4f1-4ff1-9ffb-6e26f8f6c9a0-kube-api-access-qds84\") pod \"openstackclient\" (UID: \"2474af4c-b4f1-4ff1-9ffb-6e26f8f6c9a0\") " pod="nova-kuttl-default/openstackclient"
Jan 26 17:16:58 crc kubenswrapper[4865]: I0126 17:16:58.055739 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/2474af4c-b4f1-4ff1-9ffb-6e26f8f6c9a0-openstack-config\") pod \"openstackclient\" (UID: \"2474af4c-b4f1-4ff1-9ffb-6e26f8f6c9a0\") " pod="nova-kuttl-default/openstackclient"
Jan 26 17:16:58 crc kubenswrapper[4865]: I0126 17:16:58.056262 4865 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/d410d241-c229-4630-a617-3fbb24a4cb60-openstack-config\") on node \"crc\" DevicePath \"\""
Jan 26 17:16:58 crc kubenswrapper[4865]: I0126 17:16:58.057262 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/2474af4c-b4f1-4ff1-9ffb-6e26f8f6c9a0-openstack-config\") pod \"openstackclient\" (UID: \"2474af4c-b4f1-4ff1-9ffb-6e26f8f6c9a0\") " pod="nova-kuttl-default/openstackclient"
Jan 26 17:16:58 crc kubenswrapper[4865]: I0126 17:16:58.058375 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d410d241-c229-4630-a617-3fbb24a4cb60-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "d410d241-c229-4630-a617-3fbb24a4cb60" (UID: "d410d241-c229-4630-a617-3fbb24a4cb60"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 17:16:58 crc kubenswrapper[4865]: I0126 17:16:58.059124 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/2474af4c-b4f1-4ff1-9ffb-6e26f8f6c9a0-openstack-config-secret\") pod \"openstackclient\" (UID: \"2474af4c-b4f1-4ff1-9ffb-6e26f8f6c9a0\") " pod="nova-kuttl-default/openstackclient"
Jan 26 17:16:58 crc kubenswrapper[4865]: I0126 17:16:58.071797 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d410d241-c229-4630-a617-3fbb24a4cb60-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d410d241-c229-4630-a617-3fbb24a4cb60" (UID: "d410d241-c229-4630-a617-3fbb24a4cb60"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 26 17:16:58 crc kubenswrapper[4865]: I0126 17:16:58.072560 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2474af4c-b4f1-4ff1-9ffb-6e26f8f6c9a0-combined-ca-bundle\") pod \"openstackclient\" (UID: \"2474af4c-b4f1-4ff1-9ffb-6e26f8f6c9a0\") " pod="nova-kuttl-default/openstackclient"
Jan 26 17:16:58 crc kubenswrapper[4865]: I0126 17:16:58.077207 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qds84\" (UniqueName: \"kubernetes.io/projected/2474af4c-b4f1-4ff1-9ffb-6e26f8f6c9a0-kube-api-access-qds84\") pod \"openstackclient\" (UID: \"2474af4c-b4f1-4ff1-9ffb-6e26f8f6c9a0\") " pod="nova-kuttl-default/openstackclient"
Jan 26 17:16:58 crc kubenswrapper[4865]: I0126 17:16:58.157492 4865 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d410d241-c229-4630-a617-3fbb24a4cb60-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Jan 26 17:16:58 crc kubenswrapper[4865]: I0126 17:16:58.157534 4865 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/d410d241-c229-4630-a617-3fbb24a4cb60-openstack-config-secret\") on node \"crc\" DevicePath \"\""
Jan 26 17:16:58 crc kubenswrapper[4865]: I0126 17:16:58.370574 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d410d241-c229-4630-a617-3fbb24a4cb60" path="/var/lib/kubelet/pods/d410d241-c229-4630-a617-3fbb24a4cb60/volumes"
Jan 26 17:16:58 crc kubenswrapper[4865]: I0126 17:16:58.371360 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/openstackclient"
Jan 26 17:16:58 crc kubenswrapper[4865]: I0126 17:16:58.830029 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="nova-kuttl-default/openstackclient"
Jan 26 17:16:58 crc kubenswrapper[4865]: I0126 17:16:58.831012 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["nova-kuttl-default/openstackclient"]
Jan 26 17:16:58 crc kubenswrapper[4865]: I0126 17:16:58.886420 4865 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="nova-kuttl-default/openstackclient" oldPodUID="d410d241-c229-4630-a617-3fbb24a4cb60" podUID="2474af4c-b4f1-4ff1-9ffb-6e26f8f6c9a0"
Jan 26 17:16:59 crc kubenswrapper[4865]: I0126 17:16:59.842618 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/openstackclient" event={"ID":"2474af4c-b4f1-4ff1-9ffb-6e26f8f6c9a0","Type":"ContainerStarted","Data":"cec6549d725b69e87f103e4fb08b7aa34f74fa0cd0494761c8d292694187baa3"}
Jan 26 17:17:09 crc kubenswrapper[4865]: I0126 17:17:09.946608 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="nova-kuttl-default/openstackclient" event={"ID":"2474af4c-b4f1-4ff1-9ffb-6e26f8f6c9a0","Type":"ContainerStarted","Data":"8c83d5cc24467c9d999920055abdd3f72e3d086d734a261bc0eca6068ae6fc1c"}
Jan 26 17:17:09 crc kubenswrapper[4865]: I0126 17:17:09.963067 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="nova-kuttl-default/openstackclient" podStartSLOduration=3.096401063 podStartE2EDuration="12.963049243s" podCreationTimestamp="2026-01-26 17:16:57 +0000 UTC" firstStartedPulling="2026-01-26 17:16:58.847133996 +0000 UTC m=+1346.431019583" lastFinishedPulling="2026-01-26 17:17:08.713782176 +0000 UTC m=+1356.297667763" observedRunningTime="2026-01-26 17:17:09.962327013 +0000 UTC m=+1357.546212600" watchObservedRunningTime="2026-01-26 17:17:09.963049243 +0000 UTC m=+1357.546934830"
Jan 26 17:17:19 crc kubenswrapper[4865]: I0126 17:17:19.164225 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/nova-operator-controller-manager-7fff5bf694-l9xql"]
Jan 26 17:17:19 crc kubenswrapper[4865]: I0126 17:17:19.165157 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/nova-operator-controller-manager-7fff5bf694-l9xql" podUID="6fe7b69f-8c3c-413a-b838-9abd3708a60c" containerName="manager" containerID="cri-o://9161147b06234b809fe199028ddbb2eef378759e1538df3e0c9ba0d0c976ee02" gracePeriod=10
Jan 26 17:17:19 crc kubenswrapper[4865]: I0126 17:17:19.406881 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-7fff5bf694-v2cdb"]
Jan 26 17:17:19 crc kubenswrapper[4865]: I0126 17:17:19.410064 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-7fff5bf694-v2cdb"
Jan 26 17:17:19 crc kubenswrapper[4865]: I0126 17:17:19.418969 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-7fff5bf694-v2cdb"]
Jan 26 17:17:19 crc kubenswrapper[4865]: I0126 17:17:19.516451 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-controller-init-bb665b4d9-6lbn8"]
Jan 26 17:17:19 crc kubenswrapper[4865]: I0126 17:17:19.517262 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-controller-init-bb665b4d9-6lbn8" podUID="206b70b2-3963-4d20-9bff-3ce16c46ef86" containerName="operator" containerID="cri-o://652e0ddbdcba627df82fdb5ec20e0b3a0458a70167c4e63f650d30def45b2a8b" gracePeriod=10
Jan 26 17:17:19 crc kubenswrapper[4865]: I0126 17:17:19.576832 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h8q2v\" (UniqueName: \"kubernetes.io/projected/b271d3e5-244b-4ccc-96b1-b558035d5e32-kube-api-access-h8q2v\") pod \"nova-operator-controller-manager-7fff5bf694-v2cdb\" (UID: \"b271d3e5-244b-4ccc-96b1-b558035d5e32\") " pod="openstack-operators/nova-operator-controller-manager-7fff5bf694-v2cdb"
Jan 26 17:17:19 crc kubenswrapper[4865]: I0126 17:17:19.618378 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-index-df5m8"]
Jan 26 17:17:19 crc kubenswrapper[4865]: I0126 17:17:19.619839 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-index-df5m8"
Jan 26 17:17:19 crc kubenswrapper[4865]: I0126 17:17:19.621819 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-index-dockercfg-qjzkm"
Jan 26 17:17:19 crc kubenswrapper[4865]: I0126 17:17:19.638175 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-index-df5m8"]
Jan 26 17:17:19 crc kubenswrapper[4865]: I0126 17:17:19.678139 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h8q2v\" (UniqueName: \"kubernetes.io/projected/b271d3e5-244b-4ccc-96b1-b558035d5e32-kube-api-access-h8q2v\") pod \"nova-operator-controller-manager-7fff5bf694-v2cdb\" (UID: \"b271d3e5-244b-4ccc-96b1-b558035d5e32\") " pod="openstack-operators/nova-operator-controller-manager-7fff5bf694-v2cdb"
Jan 26 17:17:19 crc kubenswrapper[4865]: I0126 17:17:19.714281 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-7fff5bf694-l9xql"
Jan 26 17:17:19 crc kubenswrapper[4865]: I0126 17:17:19.744399 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h8q2v\" (UniqueName: \"kubernetes.io/projected/b271d3e5-244b-4ccc-96b1-b558035d5e32-kube-api-access-h8q2v\") pod \"nova-operator-controller-manager-7fff5bf694-v2cdb\" (UID: \"b271d3e5-244b-4ccc-96b1-b558035d5e32\") " pod="openstack-operators/nova-operator-controller-manager-7fff5bf694-v2cdb"
Jan 26 17:17:19 crc kubenswrapper[4865]: I0126 17:17:19.780038 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z44kx\" (UniqueName: \"kubernetes.io/projected/94e56cf3-eee4-4098-8338-4cf856d1ff2c-kube-api-access-z44kx\") pod \"nova-operator-index-df5m8\" (UID: \"94e56cf3-eee4-4098-8338-4cf856d1ff2c\") " pod="openstack-operators/nova-operator-index-df5m8"
Jan 26 17:17:19 crc kubenswrapper[4865]: I0126 17:17:19.889660 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qhxgl\" (UniqueName: \"kubernetes.io/projected/6fe7b69f-8c3c-413a-b838-9abd3708a60c-kube-api-access-qhxgl\") pod \"6fe7b69f-8c3c-413a-b838-9abd3708a60c\" (UID: \"6fe7b69f-8c3c-413a-b838-9abd3708a60c\") "
Jan 26 17:17:19 crc kubenswrapper[4865]: I0126 17:17:19.889917 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z44kx\" (UniqueName: \"kubernetes.io/projected/94e56cf3-eee4-4098-8338-4cf856d1ff2c-kube-api-access-z44kx\") pod \"nova-operator-index-df5m8\" (UID: \"94e56cf3-eee4-4098-8338-4cf856d1ff2c\") " pod="openstack-operators/nova-operator-index-df5m8"
Jan 26 17:17:19 crc kubenswrapper[4865]: I0126 17:17:19.900112 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6fe7b69f-8c3c-413a-b838-9abd3708a60c-kube-api-access-qhxgl" (OuterVolumeSpecName: "kube-api-access-qhxgl") pod "6fe7b69f-8c3c-413a-b838-9abd3708a60c" (UID: "6fe7b69f-8c3c-413a-b838-9abd3708a60c"). InnerVolumeSpecName "kube-api-access-qhxgl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 17:17:19 crc kubenswrapper[4865]: I0126 17:17:19.940741 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z44kx\" (UniqueName: \"kubernetes.io/projected/94e56cf3-eee4-4098-8338-4cf856d1ff2c-kube-api-access-z44kx\") pod \"nova-operator-index-df5m8\" (UID: \"94e56cf3-eee4-4098-8338-4cf856d1ff2c\") " pod="openstack-operators/nova-operator-index-df5m8"
Jan 26 17:17:19 crc kubenswrapper[4865]: I0126 17:17:19.991226 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qhxgl\" (UniqueName: \"kubernetes.io/projected/6fe7b69f-8c3c-413a-b838-9abd3708a60c-kube-api-access-qhxgl\") on node \"crc\" DevicePath \"\""
Jan 26 17:17:20 crc kubenswrapper[4865]: I0126 17:17:20.032955 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-index-df5m8"
Jan 26 17:17:20 crc kubenswrapper[4865]: I0126 17:17:20.037263 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-7fff5bf694-v2cdb"
Jan 26 17:17:20 crc kubenswrapper[4865]: I0126 17:17:20.038660 4865 generic.go:334] "Generic (PLEG): container finished" podID="206b70b2-3963-4d20-9bff-3ce16c46ef86" containerID="652e0ddbdcba627df82fdb5ec20e0b3a0458a70167c4e63f650d30def45b2a8b" exitCode=0
Jan 26 17:17:20 crc kubenswrapper[4865]: I0126 17:17:20.038747 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-bb665b4d9-6lbn8" event={"ID":"206b70b2-3963-4d20-9bff-3ce16c46ef86","Type":"ContainerDied","Data":"652e0ddbdcba627df82fdb5ec20e0b3a0458a70167c4e63f650d30def45b2a8b"}
Jan 26 17:17:20 crc kubenswrapper[4865]: I0126 17:17:20.040812 4865 generic.go:334] "Generic (PLEG): container finished" podID="6fe7b69f-8c3c-413a-b838-9abd3708a60c" containerID="9161147b06234b809fe199028ddbb2eef378759e1538df3e0c9ba0d0c976ee02" exitCode=0
Jan 26 17:17:20 crc kubenswrapper[4865]: I0126 17:17:20.040848 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-7fff5bf694-l9xql" event={"ID":"6fe7b69f-8c3c-413a-b838-9abd3708a60c","Type":"ContainerDied","Data":"9161147b06234b809fe199028ddbb2eef378759e1538df3e0c9ba0d0c976ee02"}
Jan 26 17:17:20 crc kubenswrapper[4865]: I0126 17:17:20.040869 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-7fff5bf694-l9xql" event={"ID":"6fe7b69f-8c3c-413a-b838-9abd3708a60c","Type":"ContainerDied","Data":"1ccf8446fb351b097f1b0c6d651df6496b1be88ad0fcceef4c8cd769a5c2fc15"}
Jan 26 17:17:20 crc kubenswrapper[4865]: I0126 17:17:20.040874 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-7fff5bf694-l9xql"
Jan 26 17:17:20 crc kubenswrapper[4865]: I0126 17:17:20.040888 4865 scope.go:117] "RemoveContainer" containerID="9161147b06234b809fe199028ddbb2eef378759e1538df3e0c9ba0d0c976ee02"
Jan 26 17:17:20 crc kubenswrapper[4865]: I0126 17:17:20.093664 4865 scope.go:117] "RemoveContainer" containerID="9161147b06234b809fe199028ddbb2eef378759e1538df3e0c9ba0d0c976ee02"
Jan 26 17:17:20 crc kubenswrapper[4865]: E0126 17:17:20.098416 4865 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9161147b06234b809fe199028ddbb2eef378759e1538df3e0c9ba0d0c976ee02\": container with ID starting with 9161147b06234b809fe199028ddbb2eef378759e1538df3e0c9ba0d0c976ee02 not found: ID does not exist" containerID="9161147b06234b809fe199028ddbb2eef378759e1538df3e0c9ba0d0c976ee02"
Jan 26 17:17:20 crc kubenswrapper[4865]: I0126 17:17:20.098464 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9161147b06234b809fe199028ddbb2eef378759e1538df3e0c9ba0d0c976ee02"} err="failed to get container status \"9161147b06234b809fe199028ddbb2eef378759e1538df3e0c9ba0d0c976ee02\": rpc error: code = NotFound desc = could not find container \"9161147b06234b809fe199028ddbb2eef378759e1538df3e0c9ba0d0c976ee02\": container with ID starting with 9161147b06234b809fe199028ddbb2eef378759e1538df3e0c9ba0d0c976ee02 not found: ID does not exist"
Jan 26 17:17:20 crc kubenswrapper[4865]: I0126 17:17:20.104765 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/nova-operator-controller-manager-7fff5bf694-l9xql"]
Jan 26 17:17:20 crc kubenswrapper[4865]: I0126 17:17:20.109763 4865 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/nova-operator-controller-manager-7fff5bf694-l9xql"]
Jan 26 17:17:20 crc kubenswrapper[4865]: I0126 17:17:20.250778 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-bb665b4d9-6lbn8"
Jan 26 17:17:20 crc kubenswrapper[4865]: I0126 17:17:20.382807 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6fe7b69f-8c3c-413a-b838-9abd3708a60c" path="/var/lib/kubelet/pods/6fe7b69f-8c3c-413a-b838-9abd3708a60c/volumes"
Jan 26 17:17:20 crc kubenswrapper[4865]: I0126 17:17:20.396694 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-index-df5m8"]
Jan 26 17:17:20 crc kubenswrapper[4865]: I0126 17:17:20.416481 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-czzs6\" (UniqueName: \"kubernetes.io/projected/206b70b2-3963-4d20-9bff-3ce16c46ef86-kube-api-access-czzs6\") pod \"206b70b2-3963-4d20-9bff-3ce16c46ef86\" (UID: \"206b70b2-3963-4d20-9bff-3ce16c46ef86\") "
Jan 26 17:17:20 crc kubenswrapper[4865]: I0126 17:17:20.428165 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/206b70b2-3963-4d20-9bff-3ce16c46ef86-kube-api-access-czzs6" (OuterVolumeSpecName: "kube-api-access-czzs6") pod "206b70b2-3963-4d20-9bff-3ce16c46ef86" (UID: "206b70b2-3963-4d20-9bff-3ce16c46ef86"). InnerVolumeSpecName "kube-api-access-czzs6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 17:17:20 crc kubenswrapper[4865]: I0126 17:17:20.518793 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-czzs6\" (UniqueName: \"kubernetes.io/projected/206b70b2-3963-4d20-9bff-3ce16c46ef86-kube-api-access-czzs6\") on node \"crc\" DevicePath \"\""
Jan 26 17:17:20 crc kubenswrapper[4865]: I0126 17:17:20.703471 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-7fff5bf694-v2cdb"]
Jan 26 17:17:20 crc kubenswrapper[4865]: W0126 17:17:20.709322 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb271d3e5_244b_4ccc_96b1_b558035d5e32.slice/crio-36d79e800e38561e7d101aee8251810499628d49a8e28a412323b1f144eb6520 WatchSource:0}: Error finding container 36d79e800e38561e7d101aee8251810499628d49a8e28a412323b1f144eb6520: Status 404 returned error can't find the container with id 36d79e800e38561e7d101aee8251810499628d49a8e28a412323b1f144eb6520
Jan 26 17:17:21 crc kubenswrapper[4865]: I0126 17:17:21.049002 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-7fff5bf694-v2cdb" event={"ID":"b271d3e5-244b-4ccc-96b1-b558035d5e32","Type":"ContainerStarted","Data":"3a4021e73e579827c785c46075c07b730ff17b7b848e105131f0728d745bf8ef"}
Jan 26 17:17:21 crc kubenswrapper[4865]: I0126 17:17:21.049393 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-7fff5bf694-v2cdb" event={"ID":"b271d3e5-244b-4ccc-96b1-b558035d5e32","Type":"ContainerStarted","Data":"36d79e800e38561e7d101aee8251810499628d49a8e28a412323b1f144eb6520"}
Jan 26 17:17:21 crc kubenswrapper[4865]: I0126 17:17:21.050477 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-7fff5bf694-v2cdb"
Jan 26 17:17:21 crc kubenswrapper[4865]: I0126 17:17:21.052022 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-bb665b4d9-6lbn8"
Jan 26 17:17:21 crc kubenswrapper[4865]: I0126 17:17:21.052017 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-bb665b4d9-6lbn8" event={"ID":"206b70b2-3963-4d20-9bff-3ce16c46ef86","Type":"ContainerDied","Data":"206c9f39748ddde645376537f41eded75ebc38af1952128c89db86fc8a681943"}
Jan 26 17:17:21 crc kubenswrapper[4865]: I0126 17:17:21.052167 4865 scope.go:117] "RemoveContainer" containerID="652e0ddbdcba627df82fdb5ec20e0b3a0458a70167c4e63f650d30def45b2a8b"
Jan 26 17:17:21 crc kubenswrapper[4865]: I0126 17:17:21.053479 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-index-df5m8" event={"ID":"94e56cf3-eee4-4098-8338-4cf856d1ff2c","Type":"ContainerStarted","Data":"7315703324e6a9469bef302698e3ce859f6d507f7c62720e0163fcd52ddf5813"}
Jan 26 17:17:21 crc kubenswrapper[4865]: I0126 17:17:21.053507 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-index-df5m8" event={"ID":"94e56cf3-eee4-4098-8338-4cf856d1ff2c","Type":"ContainerStarted","Data":"8ddf8f50c08c915a5975cece55bac7a5e9ee271b9fa3c30a5caf8f3f2938a202"}
Jan 26 17:17:21 crc kubenswrapper[4865]: I0126 17:17:21.076292 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-7fff5bf694-v2cdb" podStartSLOduration=2.076263085 podStartE2EDuration="2.076263085s" podCreationTimestamp="2026-01-26 17:17:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 17:17:21.072758795 +0000 UTC m=+1368.656644402" watchObservedRunningTime="2026-01-26 17:17:21.076263085 +0000 UTC m=+1368.660148672"
Jan 26 17:17:21 crc kubenswrapper[4865]: I0126 17:17:21.098182 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-index-df5m8" podStartSLOduration=1.8524396969999999 podStartE2EDuration="2.098157819s" podCreationTimestamp="2026-01-26 17:17:19 +0000 UTC" firstStartedPulling="2026-01-26 17:17:20.417171334 +0000 UTC m=+1368.001056921" lastFinishedPulling="2026-01-26 17:17:20.662889456 +0000 UTC m=+1368.246775043" observedRunningTime="2026-01-26 17:17:21.08733747 +0000 UTC m=+1368.671223077" watchObservedRunningTime="2026-01-26 17:17:21.098157819 +0000 UTC m=+1368.682043406"
Jan 26 17:17:21 crc kubenswrapper[4865]: I0126 17:17:21.113482 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-controller-init-bb665b4d9-6lbn8"]
Jan 26 17:17:21 crc kubenswrapper[4865]: I0126 17:17:21.124926 4865 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-controller-init-bb665b4d9-6lbn8"]
Jan 26 17:17:22 crc kubenswrapper[4865]: I0126 17:17:22.372859 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="206b70b2-3963-4d20-9bff-3ce16c46ef86" path="/var/lib/kubelet/pods/206b70b2-3963-4d20-9bff-3ce16c46ef86/volumes"
Jan 26 17:17:22 crc kubenswrapper[4865]: I0126 17:17:22.397701 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/nova-operator-index-df5m8"]
Jan 26 17:17:22 crc kubenswrapper[4865]: I0126 17:17:22.804547 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-index-jwcvf"]
Jan 26 17:17:22 crc kubenswrapper[4865]: E0126 17:17:22.805017 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="206b70b2-3963-4d20-9bff-3ce16c46ef86" containerName="operator"
Jan 26 17:17:22 crc kubenswrapper[4865]: I0126 17:17:22.805034 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="206b70b2-3963-4d20-9bff-3ce16c46ef86" containerName="operator"
Jan 26 17:17:22 crc kubenswrapper[4865]: E0126 17:17:22.805053 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fe7b69f-8c3c-413a-b838-9abd3708a60c" containerName="manager"
Jan 26 17:17:22 crc kubenswrapper[4865]: I0126 17:17:22.805061 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fe7b69f-8c3c-413a-b838-9abd3708a60c" containerName="manager"
Jan 26 17:17:22 crc kubenswrapper[4865]: I0126 17:17:22.805261 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="6fe7b69f-8c3c-413a-b838-9abd3708a60c" containerName="manager"
Jan 26 17:17:22 crc kubenswrapper[4865]: I0126 17:17:22.805286 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="206b70b2-3963-4d20-9bff-3ce16c46ef86" containerName="operator"
Jan 26 17:17:22 crc kubenswrapper[4865]: I0126 17:17:22.805919 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-index-jwcvf"
Jan 26 17:17:22 crc kubenswrapper[4865]: I0126 17:17:22.813355 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-index-jwcvf"]
Jan 26 17:17:22 crc kubenswrapper[4865]: I0126 17:17:22.980967 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pczmx\" (UniqueName: \"kubernetes.io/projected/4df291e0-8c70-47eb-a77e-e9f13e3fd3fc-kube-api-access-pczmx\") pod \"nova-operator-index-jwcvf\" (UID: \"4df291e0-8c70-47eb-a77e-e9f13e3fd3fc\") " pod="openstack-operators/nova-operator-index-jwcvf"
Jan 26 17:17:23 crc kubenswrapper[4865]: I0126 17:17:23.098589 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pczmx\" (UniqueName: \"kubernetes.io/projected/4df291e0-8c70-47eb-a77e-e9f13e3fd3fc-kube-api-access-pczmx\") pod \"nova-operator-index-jwcvf\" (UID: \"4df291e0-8c70-47eb-a77e-e9f13e3fd3fc\") " pod="openstack-operators/nova-operator-index-jwcvf"
Jan 26 17:17:23 crc kubenswrapper[4865]: I0126 17:17:23.106151 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/nova-operator-index-df5m8" podUID="94e56cf3-eee4-4098-8338-4cf856d1ff2c" containerName="registry-server" containerID="cri-o://7315703324e6a9469bef302698e3ce859f6d507f7c62720e0163fcd52ddf5813" gracePeriod=2
Jan 26 17:17:23 crc kubenswrapper[4865]: I0126 17:17:23.123325 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pczmx\" (UniqueName: \"kubernetes.io/projected/4df291e0-8c70-47eb-a77e-e9f13e3fd3fc-kube-api-access-pczmx\") pod \"nova-operator-index-jwcvf\" (UID: \"4df291e0-8c70-47eb-a77e-e9f13e3fd3fc\") " pod="openstack-operators/nova-operator-index-jwcvf"
Jan 26 17:17:23 crc kubenswrapper[4865]: I0126 17:17:23.136803 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-index-jwcvf"
Jan 26 17:17:23 crc kubenswrapper[4865]: I0126 17:17:23.568776 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-index-jwcvf"]
Jan 26 17:17:24 crc kubenswrapper[4865]: I0126 17:17:24.084028 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-index-df5m8"
Jan 26 17:17:24 crc kubenswrapper[4865]: I0126 17:17:24.113401 4865 generic.go:334] "Generic (PLEG): container finished" podID="94e56cf3-eee4-4098-8338-4cf856d1ff2c" containerID="7315703324e6a9469bef302698e3ce859f6d507f7c62720e0163fcd52ddf5813" exitCode=0
Jan 26 17:17:24 crc kubenswrapper[4865]: I0126 17:17:24.113452 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-index-df5m8"
Jan 26 17:17:24 crc kubenswrapper[4865]: I0126 17:17:24.113485 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-index-df5m8" event={"ID":"94e56cf3-eee4-4098-8338-4cf856d1ff2c","Type":"ContainerDied","Data":"7315703324e6a9469bef302698e3ce859f6d507f7c62720e0163fcd52ddf5813"}
Jan 26 17:17:24 crc kubenswrapper[4865]: I0126 17:17:24.113514 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-index-df5m8" event={"ID":"94e56cf3-eee4-4098-8338-4cf856d1ff2c","Type":"ContainerDied","Data":"8ddf8f50c08c915a5975cece55bac7a5e9ee271b9fa3c30a5caf8f3f2938a202"}
Jan 26 17:17:24 crc kubenswrapper[4865]: I0126 17:17:24.113531 4865 scope.go:117] "RemoveContainer" containerID="7315703324e6a9469bef302698e3ce859f6d507f7c62720e0163fcd52ddf5813"
Jan 26 17:17:24 crc kubenswrapper[4865]: I0126 17:17:24.123216 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-index-jwcvf" event={"ID":"4df291e0-8c70-47eb-a77e-e9f13e3fd3fc","Type":"ContainerStarted","Data":"61635ef579b13a2aaf7686c35c0e523ded81d3f33b97e6094afae521781d02fc"}
Jan 26 17:17:24 crc kubenswrapper[4865]: I0126 17:17:24.123287 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-index-jwcvf" event={"ID":"4df291e0-8c70-47eb-a77e-e9f13e3fd3fc","Type":"ContainerStarted","Data":"d5967de6eaff8a85eb8e6036fd1049fb3196c2de2acd71510734230b9d3a1ba4"}
Jan 26 17:17:24 crc kubenswrapper[4865]: I0126 17:17:24.137528 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-index-jwcvf" podStartSLOduration=2.044132494 podStartE2EDuration="2.137512175s" podCreationTimestamp="2026-01-26 17:17:22 +0000 UTC" firstStartedPulling="2026-01-26 17:17:23.583746896 +0000 UTC m=+1371.167632483" lastFinishedPulling="2026-01-26 17:17:23.677126577 +0000 UTC m=+1371.261012164" observedRunningTime="2026-01-26 17:17:24.135442846 +0000 UTC m=+1371.719328443" watchObservedRunningTime="2026-01-26 17:17:24.137512175 +0000 UTC m=+1371.721397762"
Jan 26 17:17:24 crc kubenswrapper[4865]: I0126 17:17:24.141237 4865 scope.go:117] "RemoveContainer" containerID="7315703324e6a9469bef302698e3ce859f6d507f7c62720e0163fcd52ddf5813"
Jan 26 17:17:24 crc kubenswrapper[4865]: E0126 17:17:24.141780 4865 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7315703324e6a9469bef302698e3ce859f6d507f7c62720e0163fcd52ddf5813\": container with ID starting with 7315703324e6a9469bef302698e3ce859f6d507f7c62720e0163fcd52ddf5813 not found: ID does not exist" containerID="7315703324e6a9469bef302698e3ce859f6d507f7c62720e0163fcd52ddf5813"
Jan 26 17:17:24 crc kubenswrapper[4865]: I0126 17:17:24.141821 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7315703324e6a9469bef302698e3ce859f6d507f7c62720e0163fcd52ddf5813"} err="failed to get container status \"7315703324e6a9469bef302698e3ce859f6d507f7c62720e0163fcd52ddf5813\": rpc error: code = NotFound desc = could not find container \"7315703324e6a9469bef302698e3ce859f6d507f7c62720e0163fcd52ddf5813\": container with ID starting with 7315703324e6a9469bef302698e3ce859f6d507f7c62720e0163fcd52ddf5813 not found: ID does not exist"
Jan 26 17:17:24 crc kubenswrapper[4865]: I0126 17:17:24.216255 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z44kx\" (UniqueName: \"kubernetes.io/projected/94e56cf3-eee4-4098-8338-4cf856d1ff2c-kube-api-access-z44kx\") pod \"94e56cf3-eee4-4098-8338-4cf856d1ff2c\" (UID: \"94e56cf3-eee4-4098-8338-4cf856d1ff2c\") "
Jan 26 17:17:24 crc kubenswrapper[4865]: I0126 17:17:24.221196 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94e56cf3-eee4-4098-8338-4cf856d1ff2c-kube-api-access-z44kx" (OuterVolumeSpecName: "kube-api-access-z44kx") pod "94e56cf3-eee4-4098-8338-4cf856d1ff2c" (UID: "94e56cf3-eee4-4098-8338-4cf856d1ff2c"). InnerVolumeSpecName "kube-api-access-z44kx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 17:17:24 crc kubenswrapper[4865]: I0126 17:17:24.318758 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z44kx\" (UniqueName: \"kubernetes.io/projected/94e56cf3-eee4-4098-8338-4cf856d1ff2c-kube-api-access-z44kx\") on node \"crc\" DevicePath \"\""
Jan 26 17:17:24 crc kubenswrapper[4865]: I0126 17:17:24.442096 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/nova-operator-index-df5m8"]
Jan 26 17:17:24 crc kubenswrapper[4865]: I0126 17:17:24.453604 4865 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/nova-operator-index-df5m8"]
Jan 26 17:17:26 crc kubenswrapper[4865]: I0126 17:17:26.366050 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94e56cf3-eee4-4098-8338-4cf856d1ff2c" path="/var/lib/kubelet/pods/94e56cf3-eee4-4098-8338-4cf856d1ff2c/volumes"
Jan 26 17:17:30 crc kubenswrapper[4865]: I0126 17:17:30.040275 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-7fff5bf694-v2cdb"
Jan 26 17:17:33 crc kubenswrapper[4865]: I0126 17:17:33.138468 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-index-jwcvf"
Jan 26 17:17:33 crc kubenswrapper[4865]: I0126 17:17:33.138910 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/nova-operator-index-jwcvf"
Jan 26 17:17:33 crc kubenswrapper[4865]: I0126 17:17:33.170052 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/nova-operator-index-jwcvf"
Jan 26 17:17:33 crc kubenswrapper[4865]: I0126 17:17:33.218585 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-index-jwcvf"
Jan 26 17:17:34 crc kubenswrapper[4865]: I0126 17:17:34.511808 4865 patch_prober.go:28] interesting pod/machine-config-daemon-q8cb9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 26 17:17:34 crc kubenswrapper[4865]: I0126 17:17:34.512241 4865 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 26 17:17:40 crc kubenswrapper[4865]: I0126 17:17:40.439517 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/1d09387fb1a5afff7f113b17762ff553b3da62e2d26525adef646fb1c6w6d6s"]
Jan 26 17:17:40 crc kubenswrapper[4865]: E0126 17:17:40.440394 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94e56cf3-eee4-4098-8338-4cf856d1ff2c" containerName="registry-server"
Jan 26 17:17:40 crc kubenswrapper[4865]: I0126 17:17:40.440407 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="94e56cf3-eee4-4098-8338-4cf856d1ff2c" containerName="registry-server"
Jan 26 17:17:40 crc kubenswrapper[4865]: I0126 17:17:40.440592 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="94e56cf3-eee4-4098-8338-4cf856d1ff2c" containerName="registry-server"
Jan 26 17:17:40 crc kubenswrapper[4865]: I0126 17:17:40.441778 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/1d09387fb1a5afff7f113b17762ff553b3da62e2d26525adef646fb1c6w6d6s"
Jan 26 17:17:40 crc kubenswrapper[4865]: I0126 17:17:40.449864 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-8tnql"
Jan 26 17:17:40 crc kubenswrapper[4865]: I0126 17:17:40.451793 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/1d09387fb1a5afff7f113b17762ff553b3da62e2d26525adef646fb1c6w6d6s"]
Jan 26 17:17:40 crc kubenswrapper[4865]: I0126 17:17:40.491248 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/95eb8aec-5bf1-4fad-8d94-937a4147fc38-util\") pod \"1d09387fb1a5afff7f113b17762ff553b3da62e2d26525adef646fb1c6w6d6s\" (UID: \"95eb8aec-5bf1-4fad-8d94-937a4147fc38\") " pod="openstack-operators/1d09387fb1a5afff7f113b17762ff553b3da62e2d26525adef646fb1c6w6d6s"
Jan 26 17:17:40 crc kubenswrapper[4865]: I0126 17:17:40.491308 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-db7nn\" (UniqueName: \"kubernetes.io/projected/95eb8aec-5bf1-4fad-8d94-937a4147fc38-kube-api-access-db7nn\") pod \"1d09387fb1a5afff7f113b17762ff553b3da62e2d26525adef646fb1c6w6d6s\" (UID: \"95eb8aec-5bf1-4fad-8d94-937a4147fc38\") " pod="openstack-operators/1d09387fb1a5afff7f113b17762ff553b3da62e2d26525adef646fb1c6w6d6s"
Jan 26 17:17:40 crc kubenswrapper[4865]: I0126 17:17:40.491501 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/95eb8aec-5bf1-4fad-8d94-937a4147fc38-bundle\") pod \"1d09387fb1a5afff7f113b17762ff553b3da62e2d26525adef646fb1c6w6d6s\" (UID: \"95eb8aec-5bf1-4fad-8d94-937a4147fc38\") " pod="openstack-operators/1d09387fb1a5afff7f113b17762ff553b3da62e2d26525adef646fb1c6w6d6s"
Jan 26 17:17:40 crc kubenswrapper[4865]: I0126 17:17:40.592663 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/95eb8aec-5bf1-4fad-8d94-937a4147fc38-util\") pod \"1d09387fb1a5afff7f113b17762ff553b3da62e2d26525adef646fb1c6w6d6s\" (UID: \"95eb8aec-5bf1-4fad-8d94-937a4147fc38\") " pod="openstack-operators/1d09387fb1a5afff7f113b17762ff553b3da62e2d26525adef646fb1c6w6d6s"
Jan 26 17:17:40 crc kubenswrapper[4865]: I0126 17:17:40.592953 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-db7nn\" (UniqueName: \"kubernetes.io/projected/95eb8aec-5bf1-4fad-8d94-937a4147fc38-kube-api-access-db7nn\") pod \"1d09387fb1a5afff7f113b17762ff553b3da62e2d26525adef646fb1c6w6d6s\" (UID: \"95eb8aec-5bf1-4fad-8d94-937a4147fc38\") " pod="openstack-operators/1d09387fb1a5afff7f113b17762ff553b3da62e2d26525adef646fb1c6w6d6s"
Jan 26 17:17:40 crc kubenswrapper[4865]: I0126 17:17:40.593110 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/95eb8aec-5bf1-4fad-8d94-937a4147fc38-bundle\") pod \"1d09387fb1a5afff7f113b17762ff553b3da62e2d26525adef646fb1c6w6d6s\" (UID: \"95eb8aec-5bf1-4fad-8d94-937a4147fc38\") " pod="openstack-operators/1d09387fb1a5afff7f113b17762ff553b3da62e2d26525adef646fb1c6w6d6s"
Jan 26 17:17:40 crc kubenswrapper[4865]: I0126 17:17:40.593326 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/95eb8aec-5bf1-4fad-8d94-937a4147fc38-util\") pod \"1d09387fb1a5afff7f113b17762ff553b3da62e2d26525adef646fb1c6w6d6s\" (UID: \"95eb8aec-5bf1-4fad-8d94-937a4147fc38\") " pod="openstack-operators/1d09387fb1a5afff7f113b17762ff553b3da62e2d26525adef646fb1c6w6d6s"
Jan 26 17:17:40 crc kubenswrapper[4865]: I0126 17:17:40.593555 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/95eb8aec-5bf1-4fad-8d94-937a4147fc38-bundle\") pod \"1d09387fb1a5afff7f113b17762ff553b3da62e2d26525adef646fb1c6w6d6s\" (UID: \"95eb8aec-5bf1-4fad-8d94-937a4147fc38\") " pod="openstack-operators/1d09387fb1a5afff7f113b17762ff553b3da62e2d26525adef646fb1c6w6d6s"
Jan 26 17:17:40 crc kubenswrapper[4865]: I0126 17:17:40.620117 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-db7nn\" (UniqueName: \"kubernetes.io/projected/95eb8aec-5bf1-4fad-8d94-937a4147fc38-kube-api-access-db7nn\") pod \"1d09387fb1a5afff7f113b17762ff553b3da62e2d26525adef646fb1c6w6d6s\" (UID: \"95eb8aec-5bf1-4fad-8d94-937a4147fc38\") " pod="openstack-operators/1d09387fb1a5afff7f113b17762ff553b3da62e2d26525adef646fb1c6w6d6s"
Jan 26 17:17:40 crc kubenswrapper[4865]: I0126 17:17:40.765479 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/1d09387fb1a5afff7f113b17762ff553b3da62e2d26525adef646fb1c6w6d6s"
Jan 26 17:17:41 crc kubenswrapper[4865]: I0126 17:17:41.212613 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/1d09387fb1a5afff7f113b17762ff553b3da62e2d26525adef646fb1c6w6d6s"]
Jan 26 17:17:41 crc kubenswrapper[4865]: I0126 17:17:41.269275 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/1d09387fb1a5afff7f113b17762ff553b3da62e2d26525adef646fb1c6w6d6s" event={"ID":"95eb8aec-5bf1-4fad-8d94-937a4147fc38","Type":"ContainerStarted","Data":"18a1ecf899d7b0fffafdd6b935defe09cbc4af8984b073ba0eb7f8349f912d70"}
Jan 26 17:17:42 crc kubenswrapper[4865]: I0126 17:17:42.280430 4865 generic.go:334] "Generic (PLEG): container finished" podID="95eb8aec-5bf1-4fad-8d94-937a4147fc38" containerID="40050bf16d5ba2aa50f79dcf0780973f45e68de3086ad7334591bf256f369ab5" exitCode=0
Jan 26 17:17:42 crc kubenswrapper[4865]: I0126 17:17:42.280527 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/1d09387fb1a5afff7f113b17762ff553b3da62e2d26525adef646fb1c6w6d6s" event={"ID":"95eb8aec-5bf1-4fad-8d94-937a4147fc38","Type":"ContainerDied","Data":"40050bf16d5ba2aa50f79dcf0780973f45e68de3086ad7334591bf256f369ab5"}
Jan 26 17:17:45 crc kubenswrapper[4865]: I0126 17:17:45.311013 4865 generic.go:334] "Generic (PLEG): container finished" podID="95eb8aec-5bf1-4fad-8d94-937a4147fc38" containerID="c7d0b111d785aa2b06401e733e1eff5e4452559a93c75a625704d034d2b1a534" exitCode=0
Jan 26 17:17:45 crc kubenswrapper[4865]: I0126 17:17:45.311046 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/1d09387fb1a5afff7f113b17762ff553b3da62e2d26525adef646fb1c6w6d6s" event={"ID":"95eb8aec-5bf1-4fad-8d94-937a4147fc38","Type":"ContainerDied","Data":"c7d0b111d785aa2b06401e733e1eff5e4452559a93c75a625704d034d2b1a534"}
Jan 26 17:17:46 crc kubenswrapper[4865]: I0126 17:17:46.320375 4865 generic.go:334] "Generic (PLEG): container finished" podID="95eb8aec-5bf1-4fad-8d94-937a4147fc38" containerID="5843f623d222178ff895824ecde8853291c983e8ad94749f72f95591b6fbd535" exitCode=0
Jan 26 17:17:46 crc kubenswrapper[4865]: I0126 17:17:46.320445 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/1d09387fb1a5afff7f113b17762ff553b3da62e2d26525adef646fb1c6w6d6s" event={"ID":"95eb8aec-5bf1-4fad-8d94-937a4147fc38","Type":"ContainerDied","Data":"5843f623d222178ff895824ecde8853291c983e8ad94749f72f95591b6fbd535"}
Jan 26 17:17:47 crc kubenswrapper[4865]: I0126 17:17:47.634946 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/1d09387fb1a5afff7f113b17762ff553b3da62e2d26525adef646fb1c6w6d6s"
Jan 26 17:17:47 crc kubenswrapper[4865]: I0126 17:17:47.732179 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-db7nn\" (UniqueName: \"kubernetes.io/projected/95eb8aec-5bf1-4fad-8d94-937a4147fc38-kube-api-access-db7nn\") pod \"95eb8aec-5bf1-4fad-8d94-937a4147fc38\" (UID: \"95eb8aec-5bf1-4fad-8d94-937a4147fc38\") "
Jan 26 17:17:47 crc kubenswrapper[4865]: I0126 17:17:47.732267 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/95eb8aec-5bf1-4fad-8d94-937a4147fc38-bundle\") pod \"95eb8aec-5bf1-4fad-8d94-937a4147fc38\" (UID: \"95eb8aec-5bf1-4fad-8d94-937a4147fc38\") "
Jan 26 17:17:47 crc kubenswrapper[4865]: I0126 17:17:47.732335 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/95eb8aec-5bf1-4fad-8d94-937a4147fc38-util\") pod \"95eb8aec-5bf1-4fad-8d94-937a4147fc38\" (UID: \"95eb8aec-5bf1-4fad-8d94-937a4147fc38\") "
Jan 26 17:17:47 crc kubenswrapper[4865]: I0126 17:17:47.733879 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/95eb8aec-5bf1-4fad-8d94-937a4147fc38-bundle" (OuterVolumeSpecName: "bundle") pod "95eb8aec-5bf1-4fad-8d94-937a4147fc38" (UID: "95eb8aec-5bf1-4fad-8d94-937a4147fc38"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 26 17:17:47 crc kubenswrapper[4865]: I0126 17:17:47.744156 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/95eb8aec-5bf1-4fad-8d94-937a4147fc38-util" (OuterVolumeSpecName: "util") pod "95eb8aec-5bf1-4fad-8d94-937a4147fc38" (UID: "95eb8aec-5bf1-4fad-8d94-937a4147fc38"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 26 17:17:47 crc kubenswrapper[4865]: I0126 17:17:47.747473 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/95eb8aec-5bf1-4fad-8d94-937a4147fc38-kube-api-access-db7nn" (OuterVolumeSpecName: "kube-api-access-db7nn") pod "95eb8aec-5bf1-4fad-8d94-937a4147fc38" (UID: "95eb8aec-5bf1-4fad-8d94-937a4147fc38"). InnerVolumeSpecName "kube-api-access-db7nn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 26 17:17:47 crc kubenswrapper[4865]: I0126 17:17:47.835295 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-db7nn\" (UniqueName: \"kubernetes.io/projected/95eb8aec-5bf1-4fad-8d94-937a4147fc38-kube-api-access-db7nn\") on node \"crc\" DevicePath \"\""
Jan 26 17:17:47 crc kubenswrapper[4865]: I0126 17:17:47.835349 4865 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/95eb8aec-5bf1-4fad-8d94-937a4147fc38-bundle\") on node \"crc\" DevicePath \"\""
Jan 26 17:17:47 crc kubenswrapper[4865]: I0126 17:17:47.835362 4865 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/95eb8aec-5bf1-4fad-8d94-937a4147fc38-util\") on node \"crc\" DevicePath \"\""
Jan 26 17:17:48 crc kubenswrapper[4865]: I0126 17:17:48.343835 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/1d09387fb1a5afff7f113b17762ff553b3da62e2d26525adef646fb1c6w6d6s" event={"ID":"95eb8aec-5bf1-4fad-8d94-937a4147fc38","Type":"ContainerDied","Data":"18a1ecf899d7b0fffafdd6b935defe09cbc4af8984b073ba0eb7f8349f912d70"}
Jan 26 17:17:48 crc kubenswrapper[4865]: I0126 17:17:48.343904 4865 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="18a1ecf899d7b0fffafdd6b935defe09cbc4af8984b073ba0eb7f8349f912d70"
Jan 26 17:17:48 crc kubenswrapper[4865]: I0126 17:17:48.343938 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/1d09387fb1a5afff7f113b17762ff553b3da62e2d26525adef646fb1c6w6d6s"
Jan 26 17:18:04 crc kubenswrapper[4865]: I0126 17:18:04.512218 4865 patch_prober.go:28] interesting pod/machine-config-daemon-q8cb9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 26 17:18:04 crc kubenswrapper[4865]: I0126 17:18:04.512779 4865 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 26 17:18:34 crc kubenswrapper[4865]: I0126 17:18:34.512705 4865 patch_prober.go:28] interesting pod/machine-config-daemon-q8cb9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 26 17:18:34 crc kubenswrapper[4865]: I0126 17:18:34.513321 4865 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 26 17:18:34 crc kubenswrapper[4865]: I0126 17:18:34.513366 4865 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9"
Jan 26 17:18:34 crc kubenswrapper[4865]: I0126 17:18:34.513863 4865 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4a8168cbc289a20fb396355edbd9ff821c457b13b920d48ef22e5c0bf955b681"} pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 26 17:18:34 crc kubenswrapper[4865]: I0126 17:18:34.513926 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" containerName="machine-config-daemon" containerID="cri-o://4a8168cbc289a20fb396355edbd9ff821c457b13b920d48ef22e5c0bf955b681" gracePeriod=600
Jan 26 17:18:34 crc kubenswrapper[4865]: I0126 17:18:34.749083 4865 generic.go:334] "Generic (PLEG): container finished" podID="0ddedab5-2528-4881-9251-9ba5334aea61" containerID="4a8168cbc289a20fb396355edbd9ff821c457b13b920d48ef22e5c0bf955b681" exitCode=0
Jan 26 17:18:34 crc kubenswrapper[4865]: I0126 17:18:34.749132 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" event={"ID":"0ddedab5-2528-4881-9251-9ba5334aea61","Type":"ContainerDied","Data":"4a8168cbc289a20fb396355edbd9ff821c457b13b920d48ef22e5c0bf955b681"}
Jan 26 17:18:34 crc kubenswrapper[4865]: I0126 17:18:34.749500 4865 scope.go:117] "RemoveContainer" containerID="02835d7cd4d6dbfb1002abe48dff7a6d86863fd969778fa1b275c5170d84e16e"
Jan 26 17:18:35 crc kubenswrapper[4865]: I0126 17:18:35.763331 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" event={"ID":"0ddedab5-2528-4881-9251-9ba5334aea61","Type":"ContainerStarted","Data":"f9020274f50205ff9b604c78618afd92d93eda287ad00453f536fe9f4e5fcbd5"}
Jan 26 17:20:34 crc kubenswrapper[4865]: I0126 17:20:34.512558 4865 patch_prober.go:28] interesting pod/machine-config-daemon-q8cb9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 26 17:20:34 crc kubenswrapper[4865]: I0126 17:20:34.513209 4865 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 26 17:20:35 crc kubenswrapper[4865]: I0126 17:20:35.410160 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-bn8p5"]
Jan 26 17:20:35 crc kubenswrapper[4865]: E0126 17:20:35.410615 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95eb8aec-5bf1-4fad-8d94-937a4147fc38" containerName="extract"
Jan 26 17:20:35 crc kubenswrapper[4865]: I0126 17:20:35.410637 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="95eb8aec-5bf1-4fad-8d94-937a4147fc38" containerName="extract"
Jan 26 17:20:35 crc kubenswrapper[4865]: E0126 17:20:35.410676 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95eb8aec-5bf1-4fad-8d94-937a4147fc38" containerName="pull"
Jan 26 17:20:35 crc kubenswrapper[4865]: I0126 17:20:35.410685 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="95eb8aec-5bf1-4fad-8d94-937a4147fc38" containerName="pull"
Jan 26 17:20:35 crc kubenswrapper[4865]: E0126 17:20:35.410711 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95eb8aec-5bf1-4fad-8d94-937a4147fc38" containerName="util"
Jan 26 17:20:35 crc kubenswrapper[4865]: I0126 17:20:35.410719 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="95eb8aec-5bf1-4fad-8d94-937a4147fc38" containerName="util"
Jan 26 17:20:35 crc kubenswrapper[4865]: I0126 17:20:35.410916 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="95eb8aec-5bf1-4fad-8d94-937a4147fc38" containerName="extract"
Jan 26 17:20:35 crc kubenswrapper[4865]: I0126 17:20:35.412441 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bn8p5"
Jan 26 17:20:35 crc kubenswrapper[4865]: I0126 17:20:35.423973 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bn8p5"]
Jan 26 17:20:35 crc kubenswrapper[4865]: I0126 17:20:35.458015 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b366d49f-5de5-403b-8011-b248df9a252a-catalog-content\") pod \"community-operators-bn8p5\" (UID: \"b366d49f-5de5-403b-8011-b248df9a252a\") " pod="openshift-marketplace/community-operators-bn8p5"
Jan 26 17:20:35 crc kubenswrapper[4865]: I0126 17:20:35.458140 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b366d49f-5de5-403b-8011-b248df9a252a-utilities\") pod \"community-operators-bn8p5\" (UID: \"b366d49f-5de5-403b-8011-b248df9a252a\") " pod="openshift-marketplace/community-operators-bn8p5"
Jan 26 17:20:35 crc kubenswrapper[4865]: I0126 17:20:35.458186 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9tkt\" (UniqueName: \"kubernetes.io/projected/b366d49f-5de5-403b-8011-b248df9a252a-kube-api-access-x9tkt\") pod \"community-operators-bn8p5\" (UID: \"b366d49f-5de5-403b-8011-b248df9a252a\") " pod="openshift-marketplace/community-operators-bn8p5"
Jan 26 17:20:35 crc kubenswrapper[4865]: I0126 17:20:35.559649 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b366d49f-5de5-403b-8011-b248df9a252a-catalog-content\") pod \"community-operators-bn8p5\" (UID: \"b366d49f-5de5-403b-8011-b248df9a252a\") " pod="openshift-marketplace/community-operators-bn8p5"
Jan 26 17:20:35 crc kubenswrapper[4865]: I0126 17:20:35.560300 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b366d49f-5de5-403b-8011-b248df9a252a-catalog-content\") pod \"community-operators-bn8p5\" (UID: \"b366d49f-5de5-403b-8011-b248df9a252a\") " pod="openshift-marketplace/community-operators-bn8p5"
Jan 26 17:20:35 crc kubenswrapper[4865]: I0126 17:20:35.560371 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b366d49f-5de5-403b-8011-b248df9a252a-utilities\") pod \"community-operators-bn8p5\" (UID: \"b366d49f-5de5-403b-8011-b248df9a252a\") " pod="openshift-marketplace/community-operators-bn8p5"
Jan 26 17:20:35 crc kubenswrapper[4865]: I0126 17:20:35.560406 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9tkt\" (UniqueName: \"kubernetes.io/projected/b366d49f-5de5-403b-8011-b248df9a252a-kube-api-access-x9tkt\") pod \"community-operators-bn8p5\"
(UID: \"b366d49f-5de5-403b-8011-b248df9a252a\") " pod="openshift-marketplace/community-operators-bn8p5" Jan 26 17:20:35 crc kubenswrapper[4865]: I0126 17:20:35.561200 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b366d49f-5de5-403b-8011-b248df9a252a-utilities\") pod \"community-operators-bn8p5\" (UID: \"b366d49f-5de5-403b-8011-b248df9a252a\") " pod="openshift-marketplace/community-operators-bn8p5" Jan 26 17:20:35 crc kubenswrapper[4865]: I0126 17:20:35.582847 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9tkt\" (UniqueName: \"kubernetes.io/projected/b366d49f-5de5-403b-8011-b248df9a252a-kube-api-access-x9tkt\") pod \"community-operators-bn8p5\" (UID: \"b366d49f-5de5-403b-8011-b248df9a252a\") " pod="openshift-marketplace/community-operators-bn8p5" Jan 26 17:20:35 crc kubenswrapper[4865]: I0126 17:20:35.735775 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bn8p5" Jan 26 17:20:36 crc kubenswrapper[4865]: I0126 17:20:36.977485 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bn8p5"] Jan 26 17:20:37 crc kubenswrapper[4865]: I0126 17:20:37.180632 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bn8p5" event={"ID":"b366d49f-5de5-403b-8011-b248df9a252a","Type":"ContainerStarted","Data":"f98f2bdb764c5b985d44695a3180afe518679ad848827e54ac61c001b99d2159"} Jan 26 17:20:37 crc kubenswrapper[4865]: I0126 17:20:37.180917 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bn8p5" event={"ID":"b366d49f-5de5-403b-8011-b248df9a252a","Type":"ContainerStarted","Data":"c19a303a631acb311bee3f5bdfbd6e09c63c479424d6958c1617cc5d92efbb4f"} Jan 26 17:20:38 crc kubenswrapper[4865]: I0126 17:20:38.189568 4865 generic.go:334] "Generic (PLEG): container finished" podID="b366d49f-5de5-403b-8011-b248df9a252a" containerID="f98f2bdb764c5b985d44695a3180afe518679ad848827e54ac61c001b99d2159" exitCode=0 Jan 26 17:20:38 crc kubenswrapper[4865]: I0126 17:20:38.189625 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bn8p5" event={"ID":"b366d49f-5de5-403b-8011-b248df9a252a","Type":"ContainerDied","Data":"f98f2bdb764c5b985d44695a3180afe518679ad848827e54ac61c001b99d2159"} Jan 26 17:20:38 crc kubenswrapper[4865]: I0126 17:20:38.192021 4865 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 26 17:20:40 crc kubenswrapper[4865]: I0126 17:20:40.213627 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bn8p5" event={"ID":"b366d49f-5de5-403b-8011-b248df9a252a","Type":"ContainerStarted","Data":"751021fbb9c2b2c14b626287c9ca614829a536627b3778df66b7a956ab9cb1de"} Jan 26 17:20:41 crc kubenswrapper[4865]: I0126 17:20:41.222709 4865 generic.go:334] "Generic (PLEG): container finished" podID="b366d49f-5de5-403b-8011-b248df9a252a" containerID="751021fbb9c2b2c14b626287c9ca614829a536627b3778df66b7a956ab9cb1de" exitCode=0 Jan 26 17:20:41 crc kubenswrapper[4865]: I0126 17:20:41.222773 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bn8p5" event={"ID":"b366d49f-5de5-403b-8011-b248df9a252a","Type":"ContainerDied","Data":"751021fbb9c2b2c14b626287c9ca614829a536627b3778df66b7a956ab9cb1de"} Jan 26 17:20:46 
crc kubenswrapper[4865]: I0126 17:20:46.261453 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bn8p5" event={"ID":"b366d49f-5de5-403b-8011-b248df9a252a","Type":"ContainerStarted","Data":"872fc0a6997dbce9c3a043344ccb11b45b5d17a1a017423f73628b5b11a5e26d"} Jan 26 17:20:46 crc kubenswrapper[4865]: I0126 17:20:46.292967 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-bn8p5" podStartSLOduration=4.337595626 podStartE2EDuration="11.292950826s" podCreationTimestamp="2026-01-26 17:20:35 +0000 UTC" firstStartedPulling="2026-01-26 17:20:38.191708011 +0000 UTC m=+1565.775593598" lastFinishedPulling="2026-01-26 17:20:45.147063211 +0000 UTC m=+1572.730948798" observedRunningTime="2026-01-26 17:20:46.291428773 +0000 UTC m=+1573.875314370" watchObservedRunningTime="2026-01-26 17:20:46.292950826 +0000 UTC m=+1573.876836413" Jan 26 17:20:55 crc kubenswrapper[4865]: I0126 17:20:55.736358 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-bn8p5" Jan 26 17:20:55 crc kubenswrapper[4865]: I0126 17:20:55.737029 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-bn8p5" Jan 26 17:20:55 crc kubenswrapper[4865]: I0126 17:20:55.785470 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-bn8p5" Jan 26 17:20:56 crc kubenswrapper[4865]: I0126 17:20:56.386273 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-bn8p5" Jan 26 17:20:56 crc kubenswrapper[4865]: I0126 17:20:56.438601 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bn8p5"] Jan 26 17:20:58 crc kubenswrapper[4865]: I0126 17:20:58.348264 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-bn8p5" podUID="b366d49f-5de5-403b-8011-b248df9a252a" containerName="registry-server" containerID="cri-o://872fc0a6997dbce9c3a043344ccb11b45b5d17a1a017423f73628b5b11a5e26d" gracePeriod=2 Jan 26 17:20:59 crc kubenswrapper[4865]: I0126 17:20:59.282159 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bn8p5" Jan 26 17:20:59 crc kubenswrapper[4865]: I0126 17:20:59.356245 4865 generic.go:334] "Generic (PLEG): container finished" podID="b366d49f-5de5-403b-8011-b248df9a252a" containerID="872fc0a6997dbce9c3a043344ccb11b45b5d17a1a017423f73628b5b11a5e26d" exitCode=0 Jan 26 17:20:59 crc kubenswrapper[4865]: I0126 17:20:59.356293 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bn8p5" event={"ID":"b366d49f-5de5-403b-8011-b248df9a252a","Type":"ContainerDied","Data":"872fc0a6997dbce9c3a043344ccb11b45b5d17a1a017423f73628b5b11a5e26d"} Jan 26 17:20:59 crc kubenswrapper[4865]: I0126 17:20:59.356321 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bn8p5" event={"ID":"b366d49f-5de5-403b-8011-b248df9a252a","Type":"ContainerDied","Data":"c19a303a631acb311bee3f5bdfbd6e09c63c479424d6958c1617cc5d92efbb4f"} Jan 26 17:20:59 crc kubenswrapper[4865]: I0126 17:20:59.356316 4865 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-bn8p5" Jan 26 17:20:59 crc kubenswrapper[4865]: I0126 17:20:59.356352 4865 scope.go:117] "RemoveContainer" containerID="872fc0a6997dbce9c3a043344ccb11b45b5d17a1a017423f73628b5b11a5e26d" Jan 26 17:20:59 crc kubenswrapper[4865]: I0126 17:20:59.380130 4865 scope.go:117] "RemoveContainer" containerID="751021fbb9c2b2c14b626287c9ca614829a536627b3778df66b7a956ab9cb1de" Jan 26 17:20:59 crc kubenswrapper[4865]: I0126 17:20:59.414401 4865 scope.go:117] "RemoveContainer" containerID="f98f2bdb764c5b985d44695a3180afe518679ad848827e54ac61c001b99d2159" Jan 26 17:20:59 crc kubenswrapper[4865]: I0126 17:20:59.435687 4865 scope.go:117] "RemoveContainer" containerID="872fc0a6997dbce9c3a043344ccb11b45b5d17a1a017423f73628b5b11a5e26d" Jan 26 17:20:59 crc kubenswrapper[4865]: I0126 17:20:59.436011 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x9tkt\" (UniqueName: \"kubernetes.io/projected/b366d49f-5de5-403b-8011-b248df9a252a-kube-api-access-x9tkt\") pod \"b366d49f-5de5-403b-8011-b248df9a252a\" (UID: \"b366d49f-5de5-403b-8011-b248df9a252a\") " Jan 26 17:20:59 crc kubenswrapper[4865]: I0126 17:20:59.436165 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b366d49f-5de5-403b-8011-b248df9a252a-utilities\") pod \"b366d49f-5de5-403b-8011-b248df9a252a\" (UID: \"b366d49f-5de5-403b-8011-b248df9a252a\") " Jan 26 17:20:59 crc kubenswrapper[4865]: I0126 17:20:59.436235 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b366d49f-5de5-403b-8011-b248df9a252a-catalog-content\") pod \"b366d49f-5de5-403b-8011-b248df9a252a\" (UID: \"b366d49f-5de5-403b-8011-b248df9a252a\") " Jan 26 17:20:59 crc kubenswrapper[4865]: E0126 17:20:59.437206 4865 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"872fc0a6997dbce9c3a043344ccb11b45b5d17a1a017423f73628b5b11a5e26d\": container with ID starting with 872fc0a6997dbce9c3a043344ccb11b45b5d17a1a017423f73628b5b11a5e26d not found: ID does not exist" containerID="872fc0a6997dbce9c3a043344ccb11b45b5d17a1a017423f73628b5b11a5e26d" Jan 26 17:20:59 crc kubenswrapper[4865]: I0126 17:20:59.437241 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"872fc0a6997dbce9c3a043344ccb11b45b5d17a1a017423f73628b5b11a5e26d"} err="failed to get container status \"872fc0a6997dbce9c3a043344ccb11b45b5d17a1a017423f73628b5b11a5e26d\": rpc error: code = NotFound desc = could not find container \"872fc0a6997dbce9c3a043344ccb11b45b5d17a1a017423f73628b5b11a5e26d\": container with ID starting with 872fc0a6997dbce9c3a043344ccb11b45b5d17a1a017423f73628b5b11a5e26d not found: ID does not exist" Jan 26 17:20:59 crc kubenswrapper[4865]: I0126 17:20:59.437264 4865 scope.go:117] "RemoveContainer" containerID="751021fbb9c2b2c14b626287c9ca614829a536627b3778df66b7a956ab9cb1de" Jan 26 17:20:59 crc kubenswrapper[4865]: E0126 17:20:59.437715 4865 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"751021fbb9c2b2c14b626287c9ca614829a536627b3778df66b7a956ab9cb1de\": container with ID starting with 751021fbb9c2b2c14b626287c9ca614829a536627b3778df66b7a956ab9cb1de not found: ID does not exist" 
containerID="751021fbb9c2b2c14b626287c9ca614829a536627b3778df66b7a956ab9cb1de" Jan 26 17:20:59 crc kubenswrapper[4865]: I0126 17:20:59.437762 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"751021fbb9c2b2c14b626287c9ca614829a536627b3778df66b7a956ab9cb1de"} err="failed to get container status \"751021fbb9c2b2c14b626287c9ca614829a536627b3778df66b7a956ab9cb1de\": rpc error: code = NotFound desc = could not find container \"751021fbb9c2b2c14b626287c9ca614829a536627b3778df66b7a956ab9cb1de\": container with ID starting with 751021fbb9c2b2c14b626287c9ca614829a536627b3778df66b7a956ab9cb1de not found: ID does not exist" Jan 26 17:20:59 crc kubenswrapper[4865]: I0126 17:20:59.437783 4865 scope.go:117] "RemoveContainer" containerID="f98f2bdb764c5b985d44695a3180afe518679ad848827e54ac61c001b99d2159" Jan 26 17:20:59 crc kubenswrapper[4865]: E0126 17:20:59.438138 4865 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f98f2bdb764c5b985d44695a3180afe518679ad848827e54ac61c001b99d2159\": container with ID starting with f98f2bdb764c5b985d44695a3180afe518679ad848827e54ac61c001b99d2159 not found: ID does not exist" containerID="f98f2bdb764c5b985d44695a3180afe518679ad848827e54ac61c001b99d2159" Jan 26 17:20:59 crc kubenswrapper[4865]: I0126 17:20:59.438169 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f98f2bdb764c5b985d44695a3180afe518679ad848827e54ac61c001b99d2159"} err="failed to get container status \"f98f2bdb764c5b985d44695a3180afe518679ad848827e54ac61c001b99d2159\": rpc error: code = NotFound desc = could not find container \"f98f2bdb764c5b985d44695a3180afe518679ad848827e54ac61c001b99d2159\": container with ID starting with f98f2bdb764c5b985d44695a3180afe518679ad848827e54ac61c001b99d2159 not found: ID does not exist" Jan 26 17:20:59 crc kubenswrapper[4865]: I0126 17:20:59.441462 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b366d49f-5de5-403b-8011-b248df9a252a-kube-api-access-x9tkt" (OuterVolumeSpecName: "kube-api-access-x9tkt") pod "b366d49f-5de5-403b-8011-b248df9a252a" (UID: "b366d49f-5de5-403b-8011-b248df9a252a"). InnerVolumeSpecName "kube-api-access-x9tkt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 17:20:59 crc kubenswrapper[4865]: I0126 17:20:59.453836 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b366d49f-5de5-403b-8011-b248df9a252a-utilities" (OuterVolumeSpecName: "utilities") pod "b366d49f-5de5-403b-8011-b248df9a252a" (UID: "b366d49f-5de5-403b-8011-b248df9a252a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 17:20:59 crc kubenswrapper[4865]: I0126 17:20:59.487223 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b366d49f-5de5-403b-8011-b248df9a252a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b366d49f-5de5-403b-8011-b248df9a252a" (UID: "b366d49f-5de5-403b-8011-b248df9a252a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 17:20:59 crc kubenswrapper[4865]: I0126 17:20:59.538078 4865 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b366d49f-5de5-403b-8011-b248df9a252a-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 17:20:59 crc kubenswrapper[4865]: I0126 17:20:59.538121 4865 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b366d49f-5de5-403b-8011-b248df9a252a-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 17:20:59 crc kubenswrapper[4865]: I0126 17:20:59.538134 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x9tkt\" (UniqueName: \"kubernetes.io/projected/b366d49f-5de5-403b-8011-b248df9a252a-kube-api-access-x9tkt\") on node \"crc\" DevicePath \"\"" Jan 26 17:20:59 crc kubenswrapper[4865]: I0126 17:20:59.685898 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bn8p5"] Jan 26 17:20:59 crc kubenswrapper[4865]: I0126 17:20:59.693383 4865 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-bn8p5"] Jan 26 17:21:00 crc kubenswrapper[4865]: I0126 17:21:00.367657 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b366d49f-5de5-403b-8011-b248df9a252a" path="/var/lib/kubelet/pods/b366d49f-5de5-403b-8011-b248df9a252a/volumes" Jan 26 17:21:04 crc kubenswrapper[4865]: I0126 17:21:04.519723 4865 patch_prober.go:28] interesting pod/machine-config-daemon-q8cb9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 17:21:04 crc kubenswrapper[4865]: I0126 17:21:04.520419 4865 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 17:21:34 crc kubenswrapper[4865]: I0126 17:21:34.512323 4865 patch_prober.go:28] interesting pod/machine-config-daemon-q8cb9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 17:21:34 crc kubenswrapper[4865]: I0126 17:21:34.512852 4865 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 17:21:34 crc kubenswrapper[4865]: I0126 17:21:34.512900 4865 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" Jan 26 17:21:34 crc kubenswrapper[4865]: I0126 17:21:34.513594 4865 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f9020274f50205ff9b604c78618afd92d93eda287ad00453f536fe9f4e5fcbd5"} pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" containerMessage="Container machine-config-daemon 
failed liveness probe, will be restarted" Jan 26 17:21:34 crc kubenswrapper[4865]: I0126 17:21:34.513654 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" containerName="machine-config-daemon" containerID="cri-o://f9020274f50205ff9b604c78618afd92d93eda287ad00453f536fe9f4e5fcbd5" gracePeriod=600 Jan 26 17:21:35 crc kubenswrapper[4865]: I0126 17:21:35.145393 4865 scope.go:117] "RemoveContainer" containerID="6db6b555b47c5e9b75fb30cc29f9d8a987e025f4e0b2b45675a890cc3d9ff261" Jan 26 17:21:35 crc kubenswrapper[4865]: E0126 17:21:35.154871 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q8cb9_openshift-machine-config-operator(0ddedab5-2528-4881-9251-9ba5334aea61)\"" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" Jan 26 17:21:35 crc kubenswrapper[4865]: I0126 17:21:35.627824 4865 generic.go:334] "Generic (PLEG): container finished" podID="0ddedab5-2528-4881-9251-9ba5334aea61" containerID="f9020274f50205ff9b604c78618afd92d93eda287ad00453f536fe9f4e5fcbd5" exitCode=0 Jan 26 17:21:35 crc kubenswrapper[4865]: I0126 17:21:35.627869 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" event={"ID":"0ddedab5-2528-4881-9251-9ba5334aea61","Type":"ContainerDied","Data":"f9020274f50205ff9b604c78618afd92d93eda287ad00453f536fe9f4e5fcbd5"} Jan 26 17:21:35 crc kubenswrapper[4865]: I0126 17:21:35.627954 4865 scope.go:117] "RemoveContainer" containerID="4a8168cbc289a20fb396355edbd9ff821c457b13b920d48ef22e5c0bf955b681" Jan 26 17:21:35 crc kubenswrapper[4865]: I0126 17:21:35.628536 4865 scope.go:117] "RemoveContainer" containerID="f9020274f50205ff9b604c78618afd92d93eda287ad00453f536fe9f4e5fcbd5" Jan 26 17:21:35 crc kubenswrapper[4865]: E0126 17:21:35.628793 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q8cb9_openshift-machine-config-operator(0ddedab5-2528-4881-9251-9ba5334aea61)\"" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" Jan 26 17:21:47 crc kubenswrapper[4865]: I0126 17:21:47.358638 4865 scope.go:117] "RemoveContainer" containerID="f9020274f50205ff9b604c78618afd92d93eda287ad00453f536fe9f4e5fcbd5" Jan 26 17:21:47 crc kubenswrapper[4865]: E0126 17:21:47.359720 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q8cb9_openshift-machine-config-operator(0ddedab5-2528-4881-9251-9ba5334aea61)\"" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" Jan 26 17:21:58 crc kubenswrapper[4865]: I0126 17:21:58.360244 4865 scope.go:117] "RemoveContainer" containerID="f9020274f50205ff9b604c78618afd92d93eda287ad00453f536fe9f4e5fcbd5" Jan 26 17:21:58 crc kubenswrapper[4865]: E0126 17:21:58.361525 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" 
for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q8cb9_openshift-machine-config-operator(0ddedab5-2528-4881-9251-9ba5334aea61)\"" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" Jan 26 17:22:13 crc kubenswrapper[4865]: I0126 17:22:13.358036 4865 scope.go:117] "RemoveContainer" containerID="f9020274f50205ff9b604c78618afd92d93eda287ad00453f536fe9f4e5fcbd5" Jan 26 17:22:13 crc kubenswrapper[4865]: E0126 17:22:13.358868 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q8cb9_openshift-machine-config-operator(0ddedab5-2528-4881-9251-9ba5334aea61)\"" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" Jan 26 17:22:28 crc kubenswrapper[4865]: I0126 17:22:28.358317 4865 scope.go:117] "RemoveContainer" containerID="f9020274f50205ff9b604c78618afd92d93eda287ad00453f536fe9f4e5fcbd5" Jan 26 17:22:28 crc kubenswrapper[4865]: E0126 17:22:28.359206 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q8cb9_openshift-machine-config-operator(0ddedab5-2528-4881-9251-9ba5334aea61)\"" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" Jan 26 17:22:28 crc kubenswrapper[4865]: I0126 17:22:28.951069 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_keystone-3f89-account-create-update-d8gsn_78d9e25c-0c19-434f-aa78-264b8a4bb52e/mariadb-account-create-update/0.log" Jan 26 17:22:29 crc kubenswrapper[4865]: I0126 17:22:29.526560 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_keystone-5c779875cf-grrzh_b1441027-296e-4463-ac94-7fa503245019/keystone-api/0.log" Jan 26 17:22:30 crc kubenswrapper[4865]: I0126 17:22:30.376484 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_keystone-bootstrap-db774_97f5260e-bc1b-4e6f-8bee-8c9054039c3b/keystone-bootstrap/0.log" Jan 26 17:22:30 crc kubenswrapper[4865]: I0126 17:22:30.907553 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_keystone-db-create-fzdd2_ae7b537d-f915-4d41-8fd2-55f7dd75bd65/mariadb-database-create/0.log" Jan 26 17:22:32 crc kubenswrapper[4865]: I0126 17:22:32.816096 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_keystone-db-sync-zz8bs_241e06d3-d93d-4495-92db-79f641de0bdc/keystone-db-sync/0.log" Jan 26 17:22:33 crc kubenswrapper[4865]: I0126 17:22:33.576327 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_memcached-0_409c7131-e3b9-4bbc-a7db-51f9c150e354/memcached/0.log" Jan 26 17:22:34 crc kubenswrapper[4865]: I0126 17:22:34.107076 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_openstack-cell1-galera-0_f970211e-cd46-409d-abc3-09d13259c370/galera/0.log" Jan 26 17:22:34 crc kubenswrapper[4865]: I0126 17:22:34.653275 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_openstack-galera-0_2ae87188-ebdc-44a7-8504-5f61c2e4ea9a/galera/0.log" Jan 26 17:22:35 crc 
kubenswrapper[4865]: I0126 17:22:35.130050 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_openstackclient_2474af4c-b4f1-4ff1-9ffb-6e26f8f6c9a0/openstackclient/0.log" Jan 26 17:22:35 crc kubenswrapper[4865]: I0126 17:22:35.257080 4865 scope.go:117] "RemoveContainer" containerID="19b5de8f35ba366e45334edd75737da23579b25e16e573c8233911da747f8c60" Jan 26 17:22:35 crc kubenswrapper[4865]: I0126 17:22:35.591420 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_placement-4c01-account-create-update-qgj5c_064698bb-635f-4b5c-8fa8-8f533ec421e1/mariadb-account-create-update/0.log" Jan 26 17:22:36 crc kubenswrapper[4865]: I0126 17:22:36.073114 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_placement-75795794cb-9clbk_8537d9ef-d3c2-4fb9-b955-0ef7983eb4ea/placement-log/0.log" Jan 26 17:22:36 crc kubenswrapper[4865]: I0126 17:22:36.483977 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_placement-db-create-kvzzf_e320b6ce-4319-432a-96ba-9e1d54f795cc/mariadb-database-create/0.log" Jan 26 17:22:36 crc kubenswrapper[4865]: I0126 17:22:36.935841 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_placement-db-sync-v8qx4_d1754996-361d-43dd-bb98-c8f8cc9875e2/placement-db-sync/0.log" Jan 26 17:22:37 crc kubenswrapper[4865]: I0126 17:22:37.394075 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_rabbitmq-broadcaster-server-0_13ed1e3f-65dc-47e3-a8bc-c33f37660f23/rabbitmq/0.log" Jan 26 17:22:37 crc kubenswrapper[4865]: I0126 17:22:37.901537 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_rabbitmq-cell1-server-0_0b48bb4a-936f-491b-b54a-a66bfcac547d/rabbitmq/0.log" Jan 26 17:22:38 crc kubenswrapper[4865]: I0126 17:22:38.353592 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_rabbitmq-server-0_31ccc966-f964-468b-a364-0bb3360a4933/rabbitmq/0.log" Jan 26 17:22:38 crc kubenswrapper[4865]: I0126 17:22:38.761548 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_root-account-create-update-x64cv_c738f9ed-ff61-4d5f-84e8-a7fc9936423a/mariadb-account-create-update/0.log" Jan 26 17:22:42 crc kubenswrapper[4865]: I0126 17:22:42.357938 4865 scope.go:117] "RemoveContainer" containerID="f9020274f50205ff9b604c78618afd92d93eda287ad00453f536fe9f4e5fcbd5" Jan 26 17:22:42 crc kubenswrapper[4865]: E0126 17:22:42.358612 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q8cb9_openshift-machine-config-operator(0ddedab5-2528-4881-9251-9ba5334aea61)\"" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" Jan 26 17:22:55 crc kubenswrapper[4865]: I0126 17:22:55.357597 4865 scope.go:117] "RemoveContainer" containerID="f9020274f50205ff9b604c78618afd92d93eda287ad00453f536fe9f4e5fcbd5" Jan 26 17:22:55 crc kubenswrapper[4865]: E0126 17:22:55.358579 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q8cb9_openshift-machine-config-operator(0ddedab5-2528-4881-9251-9ba5334aea61)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" Jan 26 17:23:06 crc kubenswrapper[4865]: I0126 17:23:06.357771 4865 scope.go:117] "RemoveContainer" containerID="f9020274f50205ff9b604c78618afd92d93eda287ad00453f536fe9f4e5fcbd5" Jan 26 17:23:06 crc kubenswrapper[4865]: E0126 17:23:06.359411 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q8cb9_openshift-machine-config-operator(0ddedab5-2528-4881-9251-9ba5334aea61)\"" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" Jan 26 17:23:11 crc kubenswrapper[4865]: I0126 17:23:11.226616 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_1d09387fb1a5afff7f113b17762ff553b3da62e2d26525adef646fb1c6w6d6s_95eb8aec-5bf1-4fad-8d94-937a4147fc38/extract/0.log" Jan 26 17:23:11 crc kubenswrapper[4865]: I0126 17:23:11.644597 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a2e6715c52e9904d8b44b00a99f99bc81a64b1fc7d62ec0e717535fb0b7dn8z_5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac/extract/0.log" Jan 26 17:23:12 crc kubenswrapper[4865]: I0126 17:23:12.068587 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7f86f8796f-bkj4l_e4eb0e9f-c220-4e50-9a7e-89da7fe708d9/manager/0.log" Jan 26 17:23:12 crc kubenswrapper[4865]: I0126 17:23:12.485694 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-7478f7dbf9-8cwj6_b37c27a9-7292-41a4-92bb-584e9b492aa0/manager/0.log" Jan 26 17:23:12 crc kubenswrapper[4865]: I0126 17:23:12.926112 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-b45d7bf98-l5z5s_447fe2d4-cd26-4a3b-9cb0-5834991e70f4/manager/0.log" Jan 26 17:23:13 crc kubenswrapper[4865]: I0126 17:23:13.408879 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-78fdd796fd-jjnkn_eababfca-3249-49df-b6d4-364e669e4b1e/manager/0.log" Jan 26 17:23:13 crc kubenswrapper[4865]: I0126 17:23:13.835704 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-594c8c9d5d-c2786_fb50fe13-a8bb-4115-85ab-677105257e40/manager/0.log" Jan 26 17:23:14 crc kubenswrapper[4865]: I0126 17:23:14.254134 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-77d5c5b54f-t9v67_796eb6f5-8010-4397-9328-de3b605107be/manager/0.log" Jan 26 17:23:14 crc kubenswrapper[4865]: I0126 17:23:14.794795 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-694cf4f878-6ljdm_374004dc-56e6-4af1-9d53-0a14477e623f/manager/0.log" Jan 26 17:23:15 crc kubenswrapper[4865]: I0126 17:23:15.233483 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-598f7747c9-kblgz_1f630a72-d9f3-4af8-9b61-11b0551ad19f/manager/0.log" Jan 26 17:23:15 crc kubenswrapper[4865]: I0126 17:23:15.687831 4865 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-b8b6d4659-xgdcd_de67db31-b314-42ed-ac57-68f9fc5aab7c/manager/0.log" Jan 26 17:23:16 crc kubenswrapper[4865]: I0126 17:23:16.171624 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-78c6999f6f-jdbmf_9532dd1b-a2e9-4e25-b6ae-55f9d9e42859/manager/0.log" Jan 26 17:23:16 crc kubenswrapper[4865]: I0126 17:23:16.688067 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-6b9fb5fdcb-d2544_40656d26-d2a5-4728-a67c-1880fb430675/manager/0.log" Jan 26 17:23:17 crc kubenswrapper[4865]: I0126 17:23:17.095875 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-78d58447c5-psb52_e65ea7be-88a0-4d65-ac76-b1956c034abd/manager/0.log" Jan 26 17:23:17 crc kubenswrapper[4865]: I0126 17:23:17.578139 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-7fff5bf694-v2cdb_b271d3e5-244b-4ccc-96b1-b558035d5e32/manager/0.log" Jan 26 17:23:18 crc kubenswrapper[4865]: I0126 17:23:18.066884 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-index-jwcvf_4df291e0-8c70-47eb-a77e-e9f13e3fd3fc/registry-server/0.log" Jan 26 17:23:18 crc kubenswrapper[4865]: I0126 17:23:18.358174 4865 scope.go:117] "RemoveContainer" containerID="f9020274f50205ff9b604c78618afd92d93eda287ad00453f536fe9f4e5fcbd5" Jan 26 17:23:18 crc kubenswrapper[4865]: E0126 17:23:18.358413 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q8cb9_openshift-machine-config-operator(0ddedab5-2528-4881-9251-9ba5334aea61)\"" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" Jan 26 17:23:18 crc kubenswrapper[4865]: I0126 17:23:18.535408 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-5f4cd88d46-7c4wx_d0457508-04e0-4b3e-a84a-097a47ee346e/manager/0.log" Jan 26 17:23:19 crc kubenswrapper[4865]: I0126 17:23:19.051308 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6b68b8b854hhdtm_4444365f-5137-411d-ba00-25c9f9a7390f/manager/0.log" Jan 26 17:23:19 crc kubenswrapper[4865]: I0126 17:23:19.965599 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-7fc674d489-dq2vn_ab8ab562-a119-48c2-b431-abe25a789d74/manager/0.log" Jan 26 17:23:20 crc kubenswrapper[4865]: I0126 17:23:20.370305 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-25lgf_4b92d2ae-9860-4398-9efa-b97f65dd6136/registry-server/0.log" Jan 26 17:23:20 crc kubenswrapper[4865]: I0126 17:23:20.828596 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-6f75f45d54-zb94x_f7123b2b-df5a-4ae2-bfe6-f067fc318b5b/manager/0.log" Jan 26 17:23:21 crc kubenswrapper[4865]: I0126 17:23:21.323805 4865 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_placement-operator-controller-manager-79d5ccc684-v2ff5_c0022f58-7356-41ad-a2da-362c9f9bfd73/manager/0.log" Jan 26 17:23:21 crc kubenswrapper[4865]: I0126 17:23:21.731681 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-9f7sb_dd2b4860-4a17-43e4-9909-afbe647519c4/operator/0.log" Jan 26 17:23:22 crc kubenswrapper[4865]: I0126 17:23:22.171197 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-547cbdb99f-lb26q_00f471fe-001b-40e6-91be-92a8f71bc951/manager/0.log" Jan 26 17:23:22 crc kubenswrapper[4865]: I0126 17:23:22.643583 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-85cd9769bb-gwrzm_e9f2346a-220f-4638-8faa-c2eba1914417/manager/0.log" Jan 26 17:23:23 crc kubenswrapper[4865]: I0126 17:23:23.105459 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-69797bbcbd-pk5pz_fcf22a02-e91a-43a4-9e2b-738373b94769/manager/0.log" Jan 26 17:23:23 crc kubenswrapper[4865]: I0126 17:23:23.548623 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-564965969-pksm4_68710299-edf4-4c26-b035-a33e03f89d5f/manager/0.log" Jan 26 17:23:28 crc kubenswrapper[4865]: I0126 17:23:28.654036 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_keystone-3f89-account-create-update-d8gsn_78d9e25c-0c19-434f-aa78-264b8a4bb52e/mariadb-account-create-update/0.log" Jan 26 17:23:29 crc kubenswrapper[4865]: I0126 17:23:29.187403 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_keystone-5c779875cf-grrzh_b1441027-296e-4463-ac94-7fa503245019/keystone-api/0.log" Jan 26 17:23:29 crc kubenswrapper[4865]: I0126 17:23:29.734823 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_keystone-bootstrap-db774_97f5260e-bc1b-4e6f-8bee-8c9054039c3b/keystone-bootstrap/0.log" Jan 26 17:23:31 crc kubenswrapper[4865]: I0126 17:23:31.061865 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_keystone-db-create-fzdd2_ae7b537d-f915-4d41-8fd2-55f7dd75bd65/mariadb-database-create/0.log" Jan 26 17:23:31 crc kubenswrapper[4865]: I0126 17:23:31.357320 4865 scope.go:117] "RemoveContainer" containerID="f9020274f50205ff9b604c78618afd92d93eda287ad00453f536fe9f4e5fcbd5" Jan 26 17:23:31 crc kubenswrapper[4865]: E0126 17:23:31.357577 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q8cb9_openshift-machine-config-operator(0ddedab5-2528-4881-9251-9ba5334aea61)\"" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" Jan 26 17:23:31 crc kubenswrapper[4865]: I0126 17:23:31.570191 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_keystone-db-sync-zz8bs_241e06d3-d93d-4495-92db-79f641de0bdc/keystone-db-sync/0.log" Jan 26 17:23:33 crc kubenswrapper[4865]: I0126 17:23:33.136793 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_memcached-0_409c7131-e3b9-4bbc-a7db-51f9c150e354/memcached/0.log" Jan 26 17:23:33 crc kubenswrapper[4865]: I0126 17:23:33.681747 4865 
log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_openstack-cell1-galera-0_f970211e-cd46-409d-abc3-09d13259c370/galera/0.log" Jan 26 17:23:34 crc kubenswrapper[4865]: I0126 17:23:34.176443 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_openstack-galera-0_2ae87188-ebdc-44a7-8504-5f61c2e4ea9a/galera/0.log" Jan 26 17:23:34 crc kubenswrapper[4865]: I0126 17:23:34.591364 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_openstackclient_2474af4c-b4f1-4ff1-9ffb-6e26f8f6c9a0/openstackclient/0.log" Jan 26 17:23:35 crc kubenswrapper[4865]: I0126 17:23:35.016701 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_placement-4c01-account-create-update-qgj5c_064698bb-635f-4b5c-8fa8-8f533ec421e1/mariadb-account-create-update/0.log" Jan 26 17:23:35 crc kubenswrapper[4865]: I0126 17:23:35.503548 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_placement-75795794cb-9clbk_8537d9ef-d3c2-4fb9-b955-0ef7983eb4ea/placement-log/0.log" Jan 26 17:23:36 crc kubenswrapper[4865]: I0126 17:23:36.018885 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_placement-db-create-kvzzf_e320b6ce-4319-432a-96ba-9e1d54f795cc/mariadb-database-create/0.log" Jan 26 17:23:36 crc kubenswrapper[4865]: I0126 17:23:36.466966 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_placement-db-sync-v8qx4_d1754996-361d-43dd-bb98-c8f8cc9875e2/placement-db-sync/0.log" Jan 26 17:23:36 crc kubenswrapper[4865]: I0126 17:23:36.969681 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_rabbitmq-broadcaster-server-0_13ed1e3f-65dc-47e3-a8bc-c33f37660f23/rabbitmq/0.log" Jan 26 17:23:37 crc kubenswrapper[4865]: I0126 17:23:37.443233 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_rabbitmq-cell1-server-0_0b48bb4a-936f-491b-b54a-a66bfcac547d/rabbitmq/0.log" Jan 26 17:23:37 crc kubenswrapper[4865]: I0126 17:23:37.920960 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_rabbitmq-server-0_31ccc966-f964-468b-a364-0bb3360a4933/rabbitmq/0.log" Jan 26 17:23:38 crc kubenswrapper[4865]: I0126 17:23:38.305927 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/nova-kuttl-default_root-account-create-update-x64cv_c738f9ed-ff61-4d5f-84e8-a7fc9936423a/mariadb-account-create-update/0.log" Jan 26 17:23:44 crc kubenswrapper[4865]: I0126 17:23:44.362852 4865 scope.go:117] "RemoveContainer" containerID="f9020274f50205ff9b604c78618afd92d93eda287ad00453f536fe9f4e5fcbd5" Jan 26 17:23:44 crc kubenswrapper[4865]: E0126 17:23:44.363575 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q8cb9_openshift-machine-config-operator(0ddedab5-2528-4881-9251-9ba5334aea61)\"" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" Jan 26 17:23:59 crc kubenswrapper[4865]: I0126 17:23:59.358412 4865 scope.go:117] "RemoveContainer" containerID="f9020274f50205ff9b604c78618afd92d93eda287ad00453f536fe9f4e5fcbd5" Jan 26 17:23:59 crc kubenswrapper[4865]: E0126 17:23:59.359198 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s 
restarting failed container=machine-config-daemon pod=machine-config-daemon-q8cb9_openshift-machine-config-operator(0ddedab5-2528-4881-9251-9ba5334aea61)\"" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" Jan 26 17:24:11 crc kubenswrapper[4865]: I0126 17:24:11.595181 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_1d09387fb1a5afff7f113b17762ff553b3da62e2d26525adef646fb1c6w6d6s_95eb8aec-5bf1-4fad-8d94-937a4147fc38/extract/0.log" Jan 26 17:24:11 crc kubenswrapper[4865]: I0126 17:24:11.984844 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a2e6715c52e9904d8b44b00a99f99bc81a64b1fc7d62ec0e717535fb0b7dn8z_5b8d0e19-e4c9-44ec-aef9-4e001f21a0ac/extract/0.log" Jan 26 17:24:12 crc kubenswrapper[4865]: I0126 17:24:12.358696 4865 scope.go:117] "RemoveContainer" containerID="f9020274f50205ff9b604c78618afd92d93eda287ad00453f536fe9f4e5fcbd5" Jan 26 17:24:12 crc kubenswrapper[4865]: E0126 17:24:12.359071 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q8cb9_openshift-machine-config-operator(0ddedab5-2528-4881-9251-9ba5334aea61)\"" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" Jan 26 17:24:12 crc kubenswrapper[4865]: I0126 17:24:12.433559 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7f86f8796f-bkj4l_e4eb0e9f-c220-4e50-9a7e-89da7fe708d9/manager/0.log" Jan 26 17:24:12 crc kubenswrapper[4865]: I0126 17:24:12.955686 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-7478f7dbf9-8cwj6_b37c27a9-7292-41a4-92bb-584e9b492aa0/manager/0.log" Jan 26 17:24:13 crc kubenswrapper[4865]: I0126 17:24:13.442514 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-b45d7bf98-l5z5s_447fe2d4-cd26-4a3b-9cb0-5834991e70f4/manager/0.log" Jan 26 17:24:13 crc kubenswrapper[4865]: I0126 17:24:13.915051 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-78fdd796fd-jjnkn_eababfca-3249-49df-b6d4-364e669e4b1e/manager/0.log" Jan 26 17:24:14 crc kubenswrapper[4865]: I0126 17:24:14.399919 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-594c8c9d5d-c2786_fb50fe13-a8bb-4115-85ab-677105257e40/manager/0.log" Jan 26 17:24:14 crc kubenswrapper[4865]: I0126 17:24:14.889734 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-77d5c5b54f-t9v67_796eb6f5-8010-4397-9328-de3b605107be/manager/0.log" Jan 26 17:24:15 crc kubenswrapper[4865]: I0126 17:24:15.459049 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-694cf4f878-6ljdm_374004dc-56e6-4af1-9d53-0a14477e623f/manager/0.log" Jan 26 17:24:15 crc kubenswrapper[4865]: I0126 17:24:15.939866 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-598f7747c9-kblgz_1f630a72-d9f3-4af8-9b61-11b0551ad19f/manager/0.log" Jan 26 17:24:16 crc kubenswrapper[4865]: I0126 17:24:16.465820 4865 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-b8b6d4659-xgdcd_de67db31-b314-42ed-ac57-68f9fc5aab7c/manager/0.log" Jan 26 17:24:16 crc kubenswrapper[4865]: I0126 17:24:16.894187 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-78c6999f6f-jdbmf_9532dd1b-a2e9-4e25-b6ae-55f9d9e42859/manager/0.log" Jan 26 17:24:17 crc kubenswrapper[4865]: I0126 17:24:17.332389 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-6b9fb5fdcb-d2544_40656d26-d2a5-4728-a67c-1880fb430675/manager/0.log" Jan 26 17:24:17 crc kubenswrapper[4865]: I0126 17:24:17.816651 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-78d58447c5-psb52_e65ea7be-88a0-4d65-ac76-b1956c034abd/manager/0.log" Jan 26 17:24:18 crc kubenswrapper[4865]: I0126 17:24:18.337623 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-7fff5bf694-v2cdb_b271d3e5-244b-4ccc-96b1-b558035d5e32/manager/0.log" Jan 26 17:24:18 crc kubenswrapper[4865]: I0126 17:24:18.795319 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-index-jwcvf_4df291e0-8c70-47eb-a77e-e9f13e3fd3fc/registry-server/0.log" Jan 26 17:24:19 crc kubenswrapper[4865]: I0126 17:24:19.232792 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-5f4cd88d46-7c4wx_d0457508-04e0-4b3e-a84a-097a47ee346e/manager/0.log" Jan 26 17:24:19 crc kubenswrapper[4865]: I0126 17:24:19.754439 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6b68b8b854hhdtm_4444365f-5137-411d-ba00-25c9f9a7390f/manager/0.log" Jan 26 17:24:20 crc kubenswrapper[4865]: I0126 17:24:20.388168 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-7fc674d489-dq2vn_ab8ab562-a119-48c2-b431-abe25a789d74/manager/0.log" Jan 26 17:24:20 crc kubenswrapper[4865]: I0126 17:24:20.861253 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-25lgf_4b92d2ae-9860-4398-9efa-b97f65dd6136/registry-server/0.log" Jan 26 17:24:21 crc kubenswrapper[4865]: I0126 17:24:21.286551 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-6f75f45d54-zb94x_f7123b2b-df5a-4ae2-bfe6-f067fc318b5b/manager/0.log" Jan 26 17:24:21 crc kubenswrapper[4865]: I0126 17:24:21.755502 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-79d5ccc684-v2ff5_c0022f58-7356-41ad-a2da-362c9f9bfd73/manager/0.log" Jan 26 17:24:22 crc kubenswrapper[4865]: I0126 17:24:22.176120 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-9f7sb_dd2b4860-4a17-43e4-9909-afbe647519c4/operator/0.log" Jan 26 17:24:22 crc kubenswrapper[4865]: I0126 17:24:22.715716 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-547cbdb99f-lb26q_00f471fe-001b-40e6-91be-92a8f71bc951/manager/0.log" Jan 26 17:24:23 crc kubenswrapper[4865]: I0126 17:24:23.208371 4865 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-85cd9769bb-gwrzm_e9f2346a-220f-4638-8faa-c2eba1914417/manager/0.log" Jan 26 17:24:23 crc kubenswrapper[4865]: I0126 17:24:23.713403 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-69797bbcbd-pk5pz_fcf22a02-e91a-43a4-9e2b-738373b94769/manager/0.log" Jan 26 17:24:24 crc kubenswrapper[4865]: I0126 17:24:24.152125 4865 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-564965969-pksm4_68710299-edf4-4c26-b035-a33e03f89d5f/manager/0.log" Jan 26 17:24:26 crc kubenswrapper[4865]: I0126 17:24:26.358317 4865 scope.go:117] "RemoveContainer" containerID="f9020274f50205ff9b604c78618afd92d93eda287ad00453f536fe9f4e5fcbd5" Jan 26 17:24:26 crc kubenswrapper[4865]: E0126 17:24:26.359818 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q8cb9_openshift-machine-config-operator(0ddedab5-2528-4881-9251-9ba5334aea61)\"" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" Jan 26 17:24:39 crc kubenswrapper[4865]: I0126 17:24:39.357959 4865 scope.go:117] "RemoveContainer" containerID="f9020274f50205ff9b604c78618afd92d93eda287ad00453f536fe9f4e5fcbd5" Jan 26 17:24:39 crc kubenswrapper[4865]: E0126 17:24:39.358661 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q8cb9_openshift-machine-config-operator(0ddedab5-2528-4881-9251-9ba5334aea61)\"" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" Jan 26 17:24:46 crc kubenswrapper[4865]: I0126 17:24:46.012844 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-xhvrg/must-gather-52gp9"] Jan 26 17:24:46 crc kubenswrapper[4865]: E0126 17:24:46.013837 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b366d49f-5de5-403b-8011-b248df9a252a" containerName="registry-server" Jan 26 17:24:46 crc kubenswrapper[4865]: I0126 17:24:46.013854 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="b366d49f-5de5-403b-8011-b248df9a252a" containerName="registry-server" Jan 26 17:24:46 crc kubenswrapper[4865]: E0126 17:24:46.013889 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b366d49f-5de5-403b-8011-b248df9a252a" containerName="extract-utilities" Jan 26 17:24:46 crc kubenswrapper[4865]: I0126 17:24:46.013897 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="b366d49f-5de5-403b-8011-b248df9a252a" containerName="extract-utilities" Jan 26 17:24:46 crc kubenswrapper[4865]: E0126 17:24:46.013912 4865 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b366d49f-5de5-403b-8011-b248df9a252a" containerName="extract-content" Jan 26 17:24:46 crc kubenswrapper[4865]: I0126 17:24:46.013920 4865 state_mem.go:107] "Deleted CPUSet assignment" podUID="b366d49f-5de5-403b-8011-b248df9a252a" containerName="extract-content" Jan 26 17:24:46 crc kubenswrapper[4865]: I0126 17:24:46.014147 4865 memory_manager.go:354] "RemoveStaleState removing state" podUID="b366d49f-5de5-403b-8011-b248df9a252a" 
containerName="registry-server" Jan 26 17:24:46 crc kubenswrapper[4865]: I0126 17:24:46.015205 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xhvrg/must-gather-52gp9" Jan 26 17:24:46 crc kubenswrapper[4865]: I0126 17:24:46.020006 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-xhvrg"/"kube-root-ca.crt" Jan 26 17:24:46 crc kubenswrapper[4865]: I0126 17:24:46.020362 4865 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-xhvrg"/"default-dockercfg-c6zp8" Jan 26 17:24:46 crc kubenswrapper[4865]: I0126 17:24:46.020533 4865 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-xhvrg"/"openshift-service-ca.crt" Jan 26 17:24:46 crc kubenswrapper[4865]: I0126 17:24:46.043693 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-xhvrg/must-gather-52gp9"] Jan 26 17:24:46 crc kubenswrapper[4865]: I0126 17:24:46.130380 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/8111756d-5a79-4b7d-a5de-b04a1e7ff74c-must-gather-output\") pod \"must-gather-52gp9\" (UID: \"8111756d-5a79-4b7d-a5de-b04a1e7ff74c\") " pod="openshift-must-gather-xhvrg/must-gather-52gp9" Jan 26 17:24:46 crc kubenswrapper[4865]: I0126 17:24:46.130581 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5cvw\" (UniqueName: \"kubernetes.io/projected/8111756d-5a79-4b7d-a5de-b04a1e7ff74c-kube-api-access-k5cvw\") pod \"must-gather-52gp9\" (UID: \"8111756d-5a79-4b7d-a5de-b04a1e7ff74c\") " pod="openshift-must-gather-xhvrg/must-gather-52gp9" Jan 26 17:24:46 crc kubenswrapper[4865]: I0126 17:24:46.232607 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5cvw\" (UniqueName: \"kubernetes.io/projected/8111756d-5a79-4b7d-a5de-b04a1e7ff74c-kube-api-access-k5cvw\") pod \"must-gather-52gp9\" (UID: \"8111756d-5a79-4b7d-a5de-b04a1e7ff74c\") " pod="openshift-must-gather-xhvrg/must-gather-52gp9" Jan 26 17:24:46 crc kubenswrapper[4865]: I0126 17:24:46.232751 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/8111756d-5a79-4b7d-a5de-b04a1e7ff74c-must-gather-output\") pod \"must-gather-52gp9\" (UID: \"8111756d-5a79-4b7d-a5de-b04a1e7ff74c\") " pod="openshift-must-gather-xhvrg/must-gather-52gp9" Jan 26 17:24:46 crc kubenswrapper[4865]: I0126 17:24:46.233426 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/8111756d-5a79-4b7d-a5de-b04a1e7ff74c-must-gather-output\") pod \"must-gather-52gp9\" (UID: \"8111756d-5a79-4b7d-a5de-b04a1e7ff74c\") " pod="openshift-must-gather-xhvrg/must-gather-52gp9" Jan 26 17:24:46 crc kubenswrapper[4865]: I0126 17:24:46.256798 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5cvw\" (UniqueName: \"kubernetes.io/projected/8111756d-5a79-4b7d-a5de-b04a1e7ff74c-kube-api-access-k5cvw\") pod \"must-gather-52gp9\" (UID: \"8111756d-5a79-4b7d-a5de-b04a1e7ff74c\") " pod="openshift-must-gather-xhvrg/must-gather-52gp9" Jan 26 17:24:46 crc kubenswrapper[4865]: I0126 17:24:46.335111 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-xhvrg/must-gather-52gp9" Jan 26 17:24:46 crc kubenswrapper[4865]: I0126 17:24:46.697405 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-fs7x2"] Jan 26 17:24:46 crc kubenswrapper[4865]: I0126 17:24:46.700450 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fs7x2" Jan 26 17:24:46 crc kubenswrapper[4865]: I0126 17:24:46.706480 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fs7x2"] Jan 26 17:24:46 crc kubenswrapper[4865]: I0126 17:24:46.743507 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4tnhs\" (UniqueName: \"kubernetes.io/projected/4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1-kube-api-access-4tnhs\") pod \"redhat-marketplace-fs7x2\" (UID: \"4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1\") " pod="openshift-marketplace/redhat-marketplace-fs7x2" Jan 26 17:24:46 crc kubenswrapper[4865]: I0126 17:24:46.743567 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1-utilities\") pod \"redhat-marketplace-fs7x2\" (UID: \"4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1\") " pod="openshift-marketplace/redhat-marketplace-fs7x2" Jan 26 17:24:46 crc kubenswrapper[4865]: I0126 17:24:46.743720 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1-catalog-content\") pod \"redhat-marketplace-fs7x2\" (UID: \"4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1\") " pod="openshift-marketplace/redhat-marketplace-fs7x2" Jan 26 17:24:46 crc kubenswrapper[4865]: I0126 17:24:46.803511 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-xhvrg/must-gather-52gp9"] Jan 26 17:24:46 crc kubenswrapper[4865]: I0126 17:24:46.844771 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1-catalog-content\") pod \"redhat-marketplace-fs7x2\" (UID: \"4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1\") " pod="openshift-marketplace/redhat-marketplace-fs7x2" Jan 26 17:24:46 crc kubenswrapper[4865]: I0126 17:24:46.845450 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1-catalog-content\") pod \"redhat-marketplace-fs7x2\" (UID: \"4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1\") " pod="openshift-marketplace/redhat-marketplace-fs7x2" Jan 26 17:24:46 crc kubenswrapper[4865]: I0126 17:24:46.846484 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4tnhs\" (UniqueName: \"kubernetes.io/projected/4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1-kube-api-access-4tnhs\") pod \"redhat-marketplace-fs7x2\" (UID: \"4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1\") " pod="openshift-marketplace/redhat-marketplace-fs7x2" Jan 26 17:24:46 crc kubenswrapper[4865]: I0126 17:24:46.846905 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1-utilities\") pod \"redhat-marketplace-fs7x2\" (UID: \"4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1\") " 
pod="openshift-marketplace/redhat-marketplace-fs7x2" Jan 26 17:24:46 crc kubenswrapper[4865]: I0126 17:24:46.847205 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1-utilities\") pod \"redhat-marketplace-fs7x2\" (UID: \"4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1\") " pod="openshift-marketplace/redhat-marketplace-fs7x2" Jan 26 17:24:46 crc kubenswrapper[4865]: I0126 17:24:46.870863 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4tnhs\" (UniqueName: \"kubernetes.io/projected/4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1-kube-api-access-4tnhs\") pod \"redhat-marketplace-fs7x2\" (UID: \"4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1\") " pod="openshift-marketplace/redhat-marketplace-fs7x2" Jan 26 17:24:47 crc kubenswrapper[4865]: I0126 17:24:47.026936 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fs7x2" Jan 26 17:24:47 crc kubenswrapper[4865]: I0126 17:24:47.548176 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fs7x2"] Jan 26 17:24:47 crc kubenswrapper[4865]: I0126 17:24:47.626148 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-xhvrg/must-gather-52gp9" event={"ID":"8111756d-5a79-4b7d-a5de-b04a1e7ff74c","Type":"ContainerStarted","Data":"79c1bc7b9b8edc8f6833d935a28b953380ce3329b6f17c41636d811eaf947713"} Jan 26 17:24:47 crc kubenswrapper[4865]: I0126 17:24:47.627168 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fs7x2" event={"ID":"4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1","Type":"ContainerStarted","Data":"c5c396b0dcfde16fb2cc317d19d91998d1a84097d3ac4b64667cc6b9659cdc33"} Jan 26 17:24:48 crc kubenswrapper[4865]: I0126 17:24:48.638649 4865 generic.go:334] "Generic (PLEG): container finished" podID="4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1" containerID="cb69513b9ad3ff0c16e744128aaffedcb7a271f64e4c187c2fbd4533c9d87179" exitCode=0 Jan 26 17:24:48 crc kubenswrapper[4865]: I0126 17:24:48.640343 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fs7x2" event={"ID":"4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1","Type":"ContainerDied","Data":"cb69513b9ad3ff0c16e744128aaffedcb7a271f64e4c187c2fbd4533c9d87179"} Jan 26 17:24:48 crc kubenswrapper[4865]: I0126 17:24:48.896797 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-s9hg2"] Jan 26 17:24:48 crc kubenswrapper[4865]: I0126 17:24:48.898576 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-s9hg2" Jan 26 17:24:48 crc kubenswrapper[4865]: I0126 17:24:48.921176 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-s9hg2"] Jan 26 17:24:49 crc kubenswrapper[4865]: I0126 17:24:49.002844 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dfb80e34-5603-429e-8d37-4160788dae76-catalog-content\") pod \"certified-operators-s9hg2\" (UID: \"dfb80e34-5603-429e-8d37-4160788dae76\") " pod="openshift-marketplace/certified-operators-s9hg2" Jan 26 17:24:49 crc kubenswrapper[4865]: I0126 17:24:49.003195 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dfb80e34-5603-429e-8d37-4160788dae76-utilities\") pod \"certified-operators-s9hg2\" (UID: \"dfb80e34-5603-429e-8d37-4160788dae76\") " pod="openshift-marketplace/certified-operators-s9hg2" Jan 26 17:24:49 crc kubenswrapper[4865]: I0126 17:24:49.003318 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fxvd4\" (UniqueName: \"kubernetes.io/projected/dfb80e34-5603-429e-8d37-4160788dae76-kube-api-access-fxvd4\") pod \"certified-operators-s9hg2\" (UID: \"dfb80e34-5603-429e-8d37-4160788dae76\") " pod="openshift-marketplace/certified-operators-s9hg2" Jan 26 17:24:49 crc kubenswrapper[4865]: I0126 17:24:49.104693 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dfb80e34-5603-429e-8d37-4160788dae76-catalog-content\") pod \"certified-operators-s9hg2\" (UID: \"dfb80e34-5603-429e-8d37-4160788dae76\") " pod="openshift-marketplace/certified-operators-s9hg2" Jan 26 17:24:49 crc kubenswrapper[4865]: I0126 17:24:49.104790 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dfb80e34-5603-429e-8d37-4160788dae76-utilities\") pod \"certified-operators-s9hg2\" (UID: \"dfb80e34-5603-429e-8d37-4160788dae76\") " pod="openshift-marketplace/certified-operators-s9hg2" Jan 26 17:24:49 crc kubenswrapper[4865]: I0126 17:24:49.104832 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fxvd4\" (UniqueName: \"kubernetes.io/projected/dfb80e34-5603-429e-8d37-4160788dae76-kube-api-access-fxvd4\") pod \"certified-operators-s9hg2\" (UID: \"dfb80e34-5603-429e-8d37-4160788dae76\") " pod="openshift-marketplace/certified-operators-s9hg2" Jan 26 17:24:49 crc kubenswrapper[4865]: I0126 17:24:49.105340 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dfb80e34-5603-429e-8d37-4160788dae76-utilities\") pod \"certified-operators-s9hg2\" (UID: \"dfb80e34-5603-429e-8d37-4160788dae76\") " pod="openshift-marketplace/certified-operators-s9hg2" Jan 26 17:24:49 crc kubenswrapper[4865]: I0126 17:24:49.105353 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dfb80e34-5603-429e-8d37-4160788dae76-catalog-content\") pod \"certified-operators-s9hg2\" (UID: \"dfb80e34-5603-429e-8d37-4160788dae76\") " pod="openshift-marketplace/certified-operators-s9hg2" Jan 26 17:24:49 crc kubenswrapper[4865]: I0126 17:24:49.124354 4865 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-fxvd4\" (UniqueName: \"kubernetes.io/projected/dfb80e34-5603-429e-8d37-4160788dae76-kube-api-access-fxvd4\") pod \"certified-operators-s9hg2\" (UID: \"dfb80e34-5603-429e-8d37-4160788dae76\") " pod="openshift-marketplace/certified-operators-s9hg2" Jan 26 17:24:49 crc kubenswrapper[4865]: I0126 17:24:49.220385 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s9hg2" Jan 26 17:24:49 crc kubenswrapper[4865]: I0126 17:24:49.827692 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-s9hg2"] Jan 26 17:24:49 crc kubenswrapper[4865]: W0126 17:24:49.842905 4865 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddfb80e34_5603_429e_8d37_4160788dae76.slice/crio-d0b32d427bd7f8e1c5014147af61fc31f7cc345f70df89bacdfef4c2abd6ca58 WatchSource:0}: Error finding container d0b32d427bd7f8e1c5014147af61fc31f7cc345f70df89bacdfef4c2abd6ca58: Status 404 returned error can't find the container with id d0b32d427bd7f8e1c5014147af61fc31f7cc345f70df89bacdfef4c2abd6ca58 Jan 26 17:24:50 crc kubenswrapper[4865]: I0126 17:24:50.668016 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s9hg2" event={"ID":"dfb80e34-5603-429e-8d37-4160788dae76","Type":"ContainerStarted","Data":"d0b32d427bd7f8e1c5014147af61fc31f7cc345f70df89bacdfef4c2abd6ca58"} Jan 26 17:24:50 crc kubenswrapper[4865]: I0126 17:24:50.767808 4865 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-ssm94"] Jan 26 17:24:50 crc kubenswrapper[4865]: I0126 17:24:50.769475 4865 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-ssm94" Jan 26 17:24:50 crc kubenswrapper[4865]: I0126 17:24:50.780253 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ssm94"] Jan 26 17:24:50 crc kubenswrapper[4865]: I0126 17:24:50.837740 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7566c48c-5ed7-4e63-a0ff-59af87792dfd-catalog-content\") pod \"redhat-operators-ssm94\" (UID: \"7566c48c-5ed7-4e63-a0ff-59af87792dfd\") " pod="openshift-marketplace/redhat-operators-ssm94" Jan 26 17:24:50 crc kubenswrapper[4865]: I0126 17:24:50.837854 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7566c48c-5ed7-4e63-a0ff-59af87792dfd-utilities\") pod \"redhat-operators-ssm94\" (UID: \"7566c48c-5ed7-4e63-a0ff-59af87792dfd\") " pod="openshift-marketplace/redhat-operators-ssm94" Jan 26 17:24:50 crc kubenswrapper[4865]: I0126 17:24:50.837918 4865 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-24x6x\" (UniqueName: \"kubernetes.io/projected/7566c48c-5ed7-4e63-a0ff-59af87792dfd-kube-api-access-24x6x\") pod \"redhat-operators-ssm94\" (UID: \"7566c48c-5ed7-4e63-a0ff-59af87792dfd\") " pod="openshift-marketplace/redhat-operators-ssm94" Jan 26 17:24:50 crc kubenswrapper[4865]: I0126 17:24:50.939331 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7566c48c-5ed7-4e63-a0ff-59af87792dfd-utilities\") pod \"redhat-operators-ssm94\" (UID: \"7566c48c-5ed7-4e63-a0ff-59af87792dfd\") " pod="openshift-marketplace/redhat-operators-ssm94" Jan 26 17:24:50 crc kubenswrapper[4865]: I0126 17:24:50.939389 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-24x6x\" (UniqueName: \"kubernetes.io/projected/7566c48c-5ed7-4e63-a0ff-59af87792dfd-kube-api-access-24x6x\") pod \"redhat-operators-ssm94\" (UID: \"7566c48c-5ed7-4e63-a0ff-59af87792dfd\") " pod="openshift-marketplace/redhat-operators-ssm94" Jan 26 17:24:50 crc kubenswrapper[4865]: I0126 17:24:50.939418 4865 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7566c48c-5ed7-4e63-a0ff-59af87792dfd-catalog-content\") pod \"redhat-operators-ssm94\" (UID: \"7566c48c-5ed7-4e63-a0ff-59af87792dfd\") " pod="openshift-marketplace/redhat-operators-ssm94" Jan 26 17:24:50 crc kubenswrapper[4865]: I0126 17:24:50.939979 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7566c48c-5ed7-4e63-a0ff-59af87792dfd-catalog-content\") pod \"redhat-operators-ssm94\" (UID: \"7566c48c-5ed7-4e63-a0ff-59af87792dfd\") " pod="openshift-marketplace/redhat-operators-ssm94" Jan 26 17:24:50 crc kubenswrapper[4865]: I0126 17:24:50.940066 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7566c48c-5ed7-4e63-a0ff-59af87792dfd-utilities\") pod \"redhat-operators-ssm94\" (UID: \"7566c48c-5ed7-4e63-a0ff-59af87792dfd\") " pod="openshift-marketplace/redhat-operators-ssm94" Jan 26 17:24:50 crc kubenswrapper[4865]: I0126 17:24:50.959625 4865 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-24x6x\" (UniqueName: \"kubernetes.io/projected/7566c48c-5ed7-4e63-a0ff-59af87792dfd-kube-api-access-24x6x\") pod \"redhat-operators-ssm94\" (UID: \"7566c48c-5ed7-4e63-a0ff-59af87792dfd\") " pod="openshift-marketplace/redhat-operators-ssm94" Jan 26 17:24:51 crc kubenswrapper[4865]: I0126 17:24:51.091856 4865 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ssm94" Jan 26 17:24:52 crc kubenswrapper[4865]: I0126 17:24:52.357943 4865 scope.go:117] "RemoveContainer" containerID="f9020274f50205ff9b604c78618afd92d93eda287ad00453f536fe9f4e5fcbd5" Jan 26 17:24:52 crc kubenswrapper[4865]: E0126 17:24:52.358582 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q8cb9_openshift-machine-config-operator(0ddedab5-2528-4881-9251-9ba5334aea61)\"" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" Jan 26 17:25:01 crc kubenswrapper[4865]: E0126 17:25:01.549135 4865 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/openstack-must-gather:latest" Jan 26 17:25:01 crc kubenswrapper[4865]: E0126 17:25:01.549865 4865 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 26 17:25:01 crc kubenswrapper[4865]: container &Container{Name:gather,Image:quay.io/openstack-k8s-operators/openstack-must-gather:latest,Command:[/bin/bash -c if command -v setsid >/dev/null 2>&1 && command -v ps >/dev/null 2>&1 && command -v pkill >/dev/null 2>&1; then Jan 26 17:25:01 crc kubenswrapper[4865]: HAVE_SESSION_TOOLS=true Jan 26 17:25:01 crc kubenswrapper[4865]: else Jan 26 17:25:01 crc kubenswrapper[4865]: HAVE_SESSION_TOOLS=false Jan 26 17:25:01 crc kubenswrapper[4865]: fi Jan 26 17:25:01 crc kubenswrapper[4865]: Jan 26 17:25:01 crc kubenswrapper[4865]: Jan 26 17:25:01 crc kubenswrapper[4865]: echo "[disk usage checker] Started" Jan 26 17:25:01 crc kubenswrapper[4865]: target_dir="/must-gather" Jan 26 17:25:01 crc kubenswrapper[4865]: usage_percentage_limit="80" Jan 26 17:25:01 crc kubenswrapper[4865]: while true; do Jan 26 17:25:01 crc kubenswrapper[4865]: usage_percentage=$(df -P "$target_dir" | awk 'NR==2 {print $5}' | sed 's/%//') Jan 26 17:25:01 crc kubenswrapper[4865]: echo "[disk usage checker] Volume usage percentage: current = ${usage_percentage} ; allowed = ${usage_percentage_limit}" Jan 26 17:25:01 crc kubenswrapper[4865]: if [ "$usage_percentage" -gt "$usage_percentage_limit" ]; then Jan 26 17:25:01 crc kubenswrapper[4865]: echo "[disk usage checker] Disk usage exceeds the volume percentage of ${usage_percentage_limit} for mounted directory, terminating..." 
Jan 26 17:25:01 crc kubenswrapper[4865]: if [ "$HAVE_SESSION_TOOLS" = "true" ]; then Jan 26 17:25:01 crc kubenswrapper[4865]: ps -o sess --no-headers | sort -u | while read sid; do Jan 26 17:25:01 crc kubenswrapper[4865]: [[ "$sid" -eq "${$}" ]] && continue Jan 26 17:25:01 crc kubenswrapper[4865]: pkill --signal SIGKILL --session "$sid" Jan 26 17:25:01 crc kubenswrapper[4865]: done Jan 26 17:25:01 crc kubenswrapper[4865]: else Jan 26 17:25:01 crc kubenswrapper[4865]: kill 0 Jan 26 17:25:01 crc kubenswrapper[4865]: fi Jan 26 17:25:01 crc kubenswrapper[4865]: exit 1 Jan 26 17:25:01 crc kubenswrapper[4865]: fi Jan 26 17:25:01 crc kubenswrapper[4865]: sleep 5 Jan 26 17:25:01 crc kubenswrapper[4865]: done & if [ "$HAVE_SESSION_TOOLS" = "true" ]; then Jan 26 17:25:01 crc kubenswrapper[4865]: setsid -w bash <<-MUSTGATHER_EOF Jan 26 17:25:01 crc kubenswrapper[4865]: ADDITIONAL_NAMESPACES=kuttl,openshift-storage,openshift-marketplace,openshift-operators,sushy-emulator,tobiko OPENSTACK_DATABASES=ALL SOS_EDPM=all OMC=False SOS_DECOMPRESS=0 gather Jan 26 17:25:01 crc kubenswrapper[4865]: MUSTGATHER_EOF Jan 26 17:25:01 crc kubenswrapper[4865]: else Jan 26 17:25:01 crc kubenswrapper[4865]: ADDITIONAL_NAMESPACES=kuttl,openshift-storage,openshift-marketplace,openshift-operators,sushy-emulator,tobiko OPENSTACK_DATABASES=ALL SOS_EDPM=all OMC=False SOS_DECOMPRESS=0 gather Jan 26 17:25:01 crc kubenswrapper[4865]: fi; sync && echo 'Caches written to disk'],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:NODE_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:spec.nodeName,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:POD_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:must-gather-output,ReadOnly:false,MountPath:/must-gather,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-k5cvw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:nil,Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod must-gather-52gp9_openshift-must-gather-xhvrg(8111756d-5a79-4b7d-a5de-b04a1e7ff74c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled Jan 26 17:25:01 crc kubenswrapper[4865]: > logger="UnhandledError" Jan 26 17:25:01 crc kubenswrapper[4865]: E0126 17:25:01.552207 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"gather\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"copy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-must-gather:latest\\\"\"]" pod="openshift-must-gather-xhvrg/must-gather-52gp9" podUID="8111756d-5a79-4b7d-a5de-b04a1e7ff74c" Jan 26 17:25:01 crc kubenswrapper[4865]: I0126 17:25:01.781320 4865 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fs7x2" event={"ID":"4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1","Type":"ContainerStarted","Data":"e4e16667b98a42b5ed27972455bff39fedbf9aa342790a2107736be190707119"} Jan 26 17:25:01 crc kubenswrapper[4865]: I0126 17:25:01.783406 4865 generic.go:334] "Generic (PLEG): container finished" podID="dfb80e34-5603-429e-8d37-4160788dae76" containerID="c997ed1903d2d48704bb6006795b5c17ed1b1b93de7ceea59a1f0b9b8ade0591" exitCode=0 Jan 26 17:25:01 crc kubenswrapper[4865]: I0126 17:25:01.783468 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s9hg2" event={"ID":"dfb80e34-5603-429e-8d37-4160788dae76","Type":"ContainerDied","Data":"c997ed1903d2d48704bb6006795b5c17ed1b1b93de7ceea59a1f0b9b8ade0591"} Jan 26 17:25:01 crc kubenswrapper[4865]: E0126 17:25:01.792455 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"gather\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-must-gather:latest\\\"\", failed to \"StartContainer\" for \"copy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/openstack-must-gather:latest\\\"\"]" pod="openshift-must-gather-xhvrg/must-gather-52gp9" podUID="8111756d-5a79-4b7d-a5de-b04a1e7ff74c" Jan 26 17:25:01 crc kubenswrapper[4865]: I0126 17:25:01.873010 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ssm94"] Jan 26 17:25:02 crc kubenswrapper[4865]: I0126 17:25:02.795923 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ssm94" event={"ID":"7566c48c-5ed7-4e63-a0ff-59af87792dfd","Type":"ContainerStarted","Data":"b884fa0de9c6597cd659e67f1833cbc262c82aa3b4f6494550a8afd168c4f6c5"} Jan 26 17:25:03 crc kubenswrapper[4865]: I0126 17:25:03.806425 4865 generic.go:334] "Generic (PLEG): container finished" podID="4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1" containerID="e4e16667b98a42b5ed27972455bff39fedbf9aa342790a2107736be190707119" exitCode=0 Jan 26 17:25:03 crc kubenswrapper[4865]: I0126 17:25:03.806494 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fs7x2" event={"ID":"4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1","Type":"ContainerDied","Data":"e4e16667b98a42b5ed27972455bff39fedbf9aa342790a2107736be190707119"} Jan 26 17:25:06 crc kubenswrapper[4865]: I0126 17:25:06.358383 4865 scope.go:117] "RemoveContainer" containerID="f9020274f50205ff9b604c78618afd92d93eda287ad00453f536fe9f4e5fcbd5" Jan 26 17:25:06 crc kubenswrapper[4865]: E0126 17:25:06.359006 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q8cb9_openshift-machine-config-operator(0ddedab5-2528-4881-9251-9ba5334aea61)\"" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" Jan 26 17:25:06 crc kubenswrapper[4865]: I0126 17:25:06.833324 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ssm94" event={"ID":"7566c48c-5ed7-4e63-a0ff-59af87792dfd","Type":"ContainerStarted","Data":"dc850f2f25f83739dfdac08aef6437765f329cdbe238f7adda5ace2b26aa37aa"} Jan 26 17:25:09 crc kubenswrapper[4865]: I0126 17:25:09.866491 4865 generic.go:334] "Generic (PLEG): container 
finished" podID="7566c48c-5ed7-4e63-a0ff-59af87792dfd" containerID="dc850f2f25f83739dfdac08aef6437765f329cdbe238f7adda5ace2b26aa37aa" exitCode=0 Jan 26 17:25:09 crc kubenswrapper[4865]: I0126 17:25:09.866616 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ssm94" event={"ID":"7566c48c-5ed7-4e63-a0ff-59af87792dfd","Type":"ContainerDied","Data":"dc850f2f25f83739dfdac08aef6437765f329cdbe238f7adda5ace2b26aa37aa"} Jan 26 17:25:11 crc kubenswrapper[4865]: I0126 17:25:11.550119 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-xhvrg/must-gather-52gp9"] Jan 26 17:25:11 crc kubenswrapper[4865]: I0126 17:25:11.562847 4865 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-xhvrg/must-gather-52gp9"] Jan 26 17:25:19 crc kubenswrapper[4865]: I0126 17:25:11.866694 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xhvrg/must-gather-52gp9" Jan 26 17:25:19 crc kubenswrapper[4865]: I0126 17:25:11.887103 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-xhvrg/must-gather-52gp9" Jan 26 17:25:19 crc kubenswrapper[4865]: I0126 17:25:11.966036 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/8111756d-5a79-4b7d-a5de-b04a1e7ff74c-must-gather-output\") pod \"8111756d-5a79-4b7d-a5de-b04a1e7ff74c\" (UID: \"8111756d-5a79-4b7d-a5de-b04a1e7ff74c\") " Jan 26 17:25:19 crc kubenswrapper[4865]: I0126 17:25:11.966150 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k5cvw\" (UniqueName: \"kubernetes.io/projected/8111756d-5a79-4b7d-a5de-b04a1e7ff74c-kube-api-access-k5cvw\") pod \"8111756d-5a79-4b7d-a5de-b04a1e7ff74c\" (UID: \"8111756d-5a79-4b7d-a5de-b04a1e7ff74c\") " Jan 26 17:25:19 crc kubenswrapper[4865]: I0126 17:25:11.966493 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8111756d-5a79-4b7d-a5de-b04a1e7ff74c-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "8111756d-5a79-4b7d-a5de-b04a1e7ff74c" (UID: "8111756d-5a79-4b7d-a5de-b04a1e7ff74c"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 17:25:19 crc kubenswrapper[4865]: I0126 17:25:11.972685 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8111756d-5a79-4b7d-a5de-b04a1e7ff74c-kube-api-access-k5cvw" (OuterVolumeSpecName: "kube-api-access-k5cvw") pod "8111756d-5a79-4b7d-a5de-b04a1e7ff74c" (UID: "8111756d-5a79-4b7d-a5de-b04a1e7ff74c"). InnerVolumeSpecName "kube-api-access-k5cvw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 17:25:19 crc kubenswrapper[4865]: I0126 17:25:12.067982 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k5cvw\" (UniqueName: \"kubernetes.io/projected/8111756d-5a79-4b7d-a5de-b04a1e7ff74c-kube-api-access-k5cvw\") on node \"crc\" DevicePath \"\"" Jan 26 17:25:19 crc kubenswrapper[4865]: I0126 17:25:12.068074 4865 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/8111756d-5a79-4b7d-a5de-b04a1e7ff74c-must-gather-output\") on node \"crc\" DevicePath \"\"" Jan 26 17:25:19 crc kubenswrapper[4865]: I0126 17:25:12.387872 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8111756d-5a79-4b7d-a5de-b04a1e7ff74c" path="/var/lib/kubelet/pods/8111756d-5a79-4b7d-a5de-b04a1e7ff74c/volumes" Jan 26 17:25:20 crc kubenswrapper[4865]: I0126 17:25:20.362201 4865 scope.go:117] "RemoveContainer" containerID="f9020274f50205ff9b604c78618afd92d93eda287ad00453f536fe9f4e5fcbd5" Jan 26 17:25:20 crc kubenswrapper[4865]: E0126 17:25:20.363106 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q8cb9_openshift-machine-config-operator(0ddedab5-2528-4881-9251-9ba5334aea61)\"" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" Jan 26 17:25:22 crc kubenswrapper[4865]: I0126 17:25:22.995833 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s9hg2" event={"ID":"dfb80e34-5603-429e-8d37-4160788dae76","Type":"ContainerStarted","Data":"8bbc28b3ef6d6818a57c0dbde24811fb764c86a65e2a88c84e83656fc2f28175"} Jan 26 17:25:22 crc kubenswrapper[4865]: I0126 17:25:22.997521 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fs7x2" event={"ID":"4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1","Type":"ContainerStarted","Data":"7ff0c3a4539ea8ad127b907b574989f8367d1085a3fc1716db364c9542f8dea1"} Jan 26 17:25:23 crc kubenswrapper[4865]: I0126 17:25:23.002520 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ssm94" event={"ID":"7566c48c-5ed7-4e63-a0ff-59af87792dfd","Type":"ContainerStarted","Data":"740510c0bc68cb4aa212693c5adeccae9a0a50e204fd7ae5d1452ebf81881a76"} Jan 26 17:25:23 crc kubenswrapper[4865]: I0126 17:25:23.036027 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-fs7x2" podStartSLOduration=3.925276745 podStartE2EDuration="37.035968999s" podCreationTimestamp="2026-01-26 17:24:46 +0000 UTC" firstStartedPulling="2026-01-26 17:24:48.641944864 +0000 UTC m=+1816.225830451" lastFinishedPulling="2026-01-26 17:25:21.752637118 +0000 UTC m=+1849.336522705" observedRunningTime="2026-01-26 17:25:23.030133923 +0000 UTC m=+1850.614019510" watchObservedRunningTime="2026-01-26 17:25:23.035968999 +0000 UTC m=+1850.619854586" Jan 26 17:25:24 crc kubenswrapper[4865]: I0126 17:25:24.038826 4865 generic.go:334] "Generic (PLEG): container finished" podID="dfb80e34-5603-429e-8d37-4160788dae76" containerID="8bbc28b3ef6d6818a57c0dbde24811fb764c86a65e2a88c84e83656fc2f28175" exitCode=0 Jan 26 17:25:24 crc kubenswrapper[4865]: I0126 17:25:24.039214 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-s9hg2" event={"ID":"dfb80e34-5603-429e-8d37-4160788dae76","Type":"ContainerDied","Data":"8bbc28b3ef6d6818a57c0dbde24811fb764c86a65e2a88c84e83656fc2f28175"} Jan 26 17:25:26 crc kubenswrapper[4865]: I0126 17:25:26.059943 4865 generic.go:334] "Generic (PLEG): container finished" podID="7566c48c-5ed7-4e63-a0ff-59af87792dfd" containerID="740510c0bc68cb4aa212693c5adeccae9a0a50e204fd7ae5d1452ebf81881a76" exitCode=0 Jan 26 17:25:26 crc kubenswrapper[4865]: I0126 17:25:26.060037 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ssm94" event={"ID":"7566c48c-5ed7-4e63-a0ff-59af87792dfd","Type":"ContainerDied","Data":"740510c0bc68cb4aa212693c5adeccae9a0a50e204fd7ae5d1452ebf81881a76"} Jan 26 17:25:27 crc kubenswrapper[4865]: I0126 17:25:27.027947 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-fs7x2" Jan 26 17:25:27 crc kubenswrapper[4865]: I0126 17:25:27.028368 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-fs7x2" Jan 26 17:25:27 crc kubenswrapper[4865]: I0126 17:25:27.057558 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/keystone-db-create-fzdd2"] Jan 26 17:25:27 crc kubenswrapper[4865]: I0126 17:25:27.069254 4865 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/keystone-db-create-fzdd2"] Jan 26 17:25:28 crc kubenswrapper[4865]: I0126 17:25:28.029581 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/placement-db-create-kvzzf"] Jan 26 17:25:28 crc kubenswrapper[4865]: I0126 17:25:28.036817 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/placement-4c01-account-create-update-qgj5c"] Jan 26 17:25:28 crc kubenswrapper[4865]: I0126 17:25:28.045668 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/keystone-3f89-account-create-update-d8gsn"] Jan 26 17:25:28 crc kubenswrapper[4865]: I0126 17:25:28.049061 4865 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/placement-db-create-kvzzf"] Jan 26 17:25:28 crc kubenswrapper[4865]: I0126 17:25:28.054528 4865 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/keystone-3f89-account-create-update-d8gsn"] Jan 26 17:25:28 crc kubenswrapper[4865]: I0126 17:25:28.060208 4865 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/placement-4c01-account-create-update-qgj5c"] Jan 26 17:25:28 crc kubenswrapper[4865]: I0126 17:25:28.088306 4865 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-fs7x2" podUID="4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1" containerName="registry-server" probeResult="failure" output=< Jan 26 17:25:28 crc kubenswrapper[4865]: timeout: failed to connect service ":50051" within 1s Jan 26 17:25:28 crc kubenswrapper[4865]: > Jan 26 17:25:28 crc kubenswrapper[4865]: I0126 17:25:28.385223 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="064698bb-635f-4b5c-8fa8-8f533ec421e1" path="/var/lib/kubelet/pods/064698bb-635f-4b5c-8fa8-8f533ec421e1/volumes" Jan 26 17:25:28 crc kubenswrapper[4865]: I0126 17:25:28.385982 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78d9e25c-0c19-434f-aa78-264b8a4bb52e" path="/var/lib/kubelet/pods/78d9e25c-0c19-434f-aa78-264b8a4bb52e/volumes" Jan 26 17:25:28 crc kubenswrapper[4865]: I0126 
17:25:28.386728 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae7b537d-f915-4d41-8fd2-55f7dd75bd65" path="/var/lib/kubelet/pods/ae7b537d-f915-4d41-8fd2-55f7dd75bd65/volumes" Jan 26 17:25:28 crc kubenswrapper[4865]: I0126 17:25:28.387628 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e320b6ce-4319-432a-96ba-9e1d54f795cc" path="/var/lib/kubelet/pods/e320b6ce-4319-432a-96ba-9e1d54f795cc/volumes" Jan 26 17:25:29 crc kubenswrapper[4865]: I0126 17:25:29.087867 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s9hg2" event={"ID":"dfb80e34-5603-429e-8d37-4160788dae76","Type":"ContainerStarted","Data":"3ff80d1f709a97c8ce3763af37c7080fc3fd5548e36af5700fdc9a2917d58206"} Jan 26 17:25:29 crc kubenswrapper[4865]: I0126 17:25:29.090782 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ssm94" event={"ID":"7566c48c-5ed7-4e63-a0ff-59af87792dfd","Type":"ContainerStarted","Data":"94b5dc8c17ef863a44011d3eaad0522af50684db2c1b33bccf41cc9af085f9d2"} Jan 26 17:25:29 crc kubenswrapper[4865]: I0126 17:25:29.109495 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-s9hg2" podStartSLOduration=17.136858902 podStartE2EDuration="41.109477334s" podCreationTimestamp="2026-01-26 17:24:48 +0000 UTC" firstStartedPulling="2026-01-26 17:25:04.816515674 +0000 UTC m=+1832.400401261" lastFinishedPulling="2026-01-26 17:25:28.789134106 +0000 UTC m=+1856.373019693" observedRunningTime="2026-01-26 17:25:29.10545917 +0000 UTC m=+1856.689344757" watchObservedRunningTime="2026-01-26 17:25:29.109477334 +0000 UTC m=+1856.693362921" Jan 26 17:25:29 crc kubenswrapper[4865]: I0126 17:25:29.132908 4865 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-ssm94" podStartSLOduration=21.206902448 podStartE2EDuration="39.132883242s" podCreationTimestamp="2026-01-26 17:24:50 +0000 UTC" firstStartedPulling="2026-01-26 17:25:10.878685455 +0000 UTC m=+1838.462571042" lastFinishedPulling="2026-01-26 17:25:28.804666249 +0000 UTC m=+1856.388551836" observedRunningTime="2026-01-26 17:25:29.125223254 +0000 UTC m=+1856.709108861" watchObservedRunningTime="2026-01-26 17:25:29.132883242 +0000 UTC m=+1856.716768829" Jan 26 17:25:29 crc kubenswrapper[4865]: I0126 17:25:29.221402 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-s9hg2" Jan 26 17:25:29 crc kubenswrapper[4865]: I0126 17:25:29.221478 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-s9hg2" Jan 26 17:25:30 crc kubenswrapper[4865]: I0126 17:25:30.290246 4865 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-s9hg2" podUID="dfb80e34-5603-429e-8d37-4160788dae76" containerName="registry-server" probeResult="failure" output=< Jan 26 17:25:30 crc kubenswrapper[4865]: timeout: failed to connect service ":50051" within 1s Jan 26 17:25:30 crc kubenswrapper[4865]: > Jan 26 17:25:31 crc kubenswrapper[4865]: I0126 17:25:31.092503 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-ssm94" Jan 26 17:25:31 crc kubenswrapper[4865]: I0126 17:25:31.092862 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-ssm94" Jan 26 17:25:32 
crc kubenswrapper[4865]: I0126 17:25:32.163384 4865 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-ssm94" podUID="7566c48c-5ed7-4e63-a0ff-59af87792dfd" containerName="registry-server" probeResult="failure" output=< Jan 26 17:25:32 crc kubenswrapper[4865]: timeout: failed to connect service ":50051" within 1s Jan 26 17:25:32 crc kubenswrapper[4865]: > Jan 26 17:25:35 crc kubenswrapper[4865]: I0126 17:25:35.350215 4865 scope.go:117] "RemoveContainer" containerID="70d9caa7b8c3fda40ff79b0a19ad382f3ae7aa6f281c0130a51a4681a1a102c7" Jan 26 17:25:35 crc kubenswrapper[4865]: I0126 17:25:35.358408 4865 scope.go:117] "RemoveContainer" containerID="f9020274f50205ff9b604c78618afd92d93eda287ad00453f536fe9f4e5fcbd5" Jan 26 17:25:35 crc kubenswrapper[4865]: E0126 17:25:35.358691 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q8cb9_openshift-machine-config-operator(0ddedab5-2528-4881-9251-9ba5334aea61)\"" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" Jan 26 17:25:35 crc kubenswrapper[4865]: I0126 17:25:35.382120 4865 scope.go:117] "RemoveContainer" containerID="4349c6b16a21cc941d2d5e4dc9e43cd86926ff2ec6a859ae24a80c29327e3c03" Jan 26 17:25:35 crc kubenswrapper[4865]: I0126 17:25:35.421420 4865 scope.go:117] "RemoveContainer" containerID="298fb6fc3b01065bcc70a7125bedc91845091191aa24851e4d663c83ad588381" Jan 26 17:25:35 crc kubenswrapper[4865]: I0126 17:25:35.450185 4865 scope.go:117] "RemoveContainer" containerID="d2f8921b572e864cb5f960a3ebf95caeb463c3155e687d4a7a032004b7c71680" Jan 26 17:25:37 crc kubenswrapper[4865]: I0126 17:25:37.077512 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-fs7x2" Jan 26 17:25:37 crc kubenswrapper[4865]: I0126 17:25:37.123018 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-fs7x2" Jan 26 17:25:37 crc kubenswrapper[4865]: I0126 17:25:37.324862 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fs7x2"] Jan 26 17:25:38 crc kubenswrapper[4865]: I0126 17:25:38.156754 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-fs7x2" podUID="4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1" containerName="registry-server" containerID="cri-o://7ff0c3a4539ea8ad127b907b574989f8367d1085a3fc1716db364c9542f8dea1" gracePeriod=2 Jan 26 17:25:39 crc kubenswrapper[4865]: I0126 17:25:39.112499 4865 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fs7x2" Jan 26 17:25:39 crc kubenswrapper[4865]: I0126 17:25:39.167185 4865 generic.go:334] "Generic (PLEG): container finished" podID="4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1" containerID="7ff0c3a4539ea8ad127b907b574989f8367d1085a3fc1716db364c9542f8dea1" exitCode=0 Jan 26 17:25:39 crc kubenswrapper[4865]: I0126 17:25:39.167237 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fs7x2" event={"ID":"4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1","Type":"ContainerDied","Data":"7ff0c3a4539ea8ad127b907b574989f8367d1085a3fc1716db364c9542f8dea1"} Jan 26 17:25:39 crc kubenswrapper[4865]: I0126 17:25:39.167248 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fs7x2" Jan 26 17:25:39 crc kubenswrapper[4865]: I0126 17:25:39.167273 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fs7x2" event={"ID":"4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1","Type":"ContainerDied","Data":"c5c396b0dcfde16fb2cc317d19d91998d1a84097d3ac4b64667cc6b9659cdc33"} Jan 26 17:25:39 crc kubenswrapper[4865]: I0126 17:25:39.167295 4865 scope.go:117] "RemoveContainer" containerID="7ff0c3a4539ea8ad127b907b574989f8367d1085a3fc1716db364c9542f8dea1" Jan 26 17:25:39 crc kubenswrapper[4865]: I0126 17:25:39.193820 4865 scope.go:117] "RemoveContainer" containerID="e4e16667b98a42b5ed27972455bff39fedbf9aa342790a2107736be190707119" Jan 26 17:25:39 crc kubenswrapper[4865]: I0126 17:25:39.195171 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1-utilities\") pod \"4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1\" (UID: \"4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1\") " Jan 26 17:25:39 crc kubenswrapper[4865]: I0126 17:25:39.195399 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4tnhs\" (UniqueName: \"kubernetes.io/projected/4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1-kube-api-access-4tnhs\") pod \"4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1\" (UID: \"4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1\") " Jan 26 17:25:39 crc kubenswrapper[4865]: I0126 17:25:39.195493 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1-catalog-content\") pod \"4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1\" (UID: \"4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1\") " Jan 26 17:25:39 crc kubenswrapper[4865]: I0126 17:25:39.196089 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1-utilities" (OuterVolumeSpecName: "utilities") pod "4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1" (UID: "4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 17:25:39 crc kubenswrapper[4865]: I0126 17:25:39.204754 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1-kube-api-access-4tnhs" (OuterVolumeSpecName: "kube-api-access-4tnhs") pod "4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1" (UID: "4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1"). InnerVolumeSpecName "kube-api-access-4tnhs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 17:25:39 crc kubenswrapper[4865]: I0126 17:25:39.224655 4865 scope.go:117] "RemoveContainer" containerID="cb69513b9ad3ff0c16e744128aaffedcb7a271f64e4c187c2fbd4533c9d87179" Jan 26 17:25:39 crc kubenswrapper[4865]: I0126 17:25:39.233641 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1" (UID: "4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 17:25:39 crc kubenswrapper[4865]: I0126 17:25:39.268023 4865 scope.go:117] "RemoveContainer" containerID="7ff0c3a4539ea8ad127b907b574989f8367d1085a3fc1716db364c9542f8dea1" Jan 26 17:25:39 crc kubenswrapper[4865]: E0126 17:25:39.268623 4865 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ff0c3a4539ea8ad127b907b574989f8367d1085a3fc1716db364c9542f8dea1\": container with ID starting with 7ff0c3a4539ea8ad127b907b574989f8367d1085a3fc1716db364c9542f8dea1 not found: ID does not exist" containerID="7ff0c3a4539ea8ad127b907b574989f8367d1085a3fc1716db364c9542f8dea1" Jan 26 17:25:39 crc kubenswrapper[4865]: I0126 17:25:39.268674 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ff0c3a4539ea8ad127b907b574989f8367d1085a3fc1716db364c9542f8dea1"} err="failed to get container status \"7ff0c3a4539ea8ad127b907b574989f8367d1085a3fc1716db364c9542f8dea1\": rpc error: code = NotFound desc = could not find container \"7ff0c3a4539ea8ad127b907b574989f8367d1085a3fc1716db364c9542f8dea1\": container with ID starting with 7ff0c3a4539ea8ad127b907b574989f8367d1085a3fc1716db364c9542f8dea1 not found: ID does not exist" Jan 26 17:25:39 crc kubenswrapper[4865]: I0126 17:25:39.268701 4865 scope.go:117] "RemoveContainer" containerID="e4e16667b98a42b5ed27972455bff39fedbf9aa342790a2107736be190707119" Jan 26 17:25:39 crc kubenswrapper[4865]: E0126 17:25:39.269265 4865 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e4e16667b98a42b5ed27972455bff39fedbf9aa342790a2107736be190707119\": container with ID starting with e4e16667b98a42b5ed27972455bff39fedbf9aa342790a2107736be190707119 not found: ID does not exist" containerID="e4e16667b98a42b5ed27972455bff39fedbf9aa342790a2107736be190707119" Jan 26 17:25:39 crc kubenswrapper[4865]: I0126 17:25:39.269307 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4e16667b98a42b5ed27972455bff39fedbf9aa342790a2107736be190707119"} err="failed to get container status \"e4e16667b98a42b5ed27972455bff39fedbf9aa342790a2107736be190707119\": rpc error: code = NotFound desc = could not find container \"e4e16667b98a42b5ed27972455bff39fedbf9aa342790a2107736be190707119\": container with ID starting with e4e16667b98a42b5ed27972455bff39fedbf9aa342790a2107736be190707119 not found: ID does not exist" Jan 26 17:25:39 crc kubenswrapper[4865]: I0126 17:25:39.269336 4865 scope.go:117] "RemoveContainer" containerID="cb69513b9ad3ff0c16e744128aaffedcb7a271f64e4c187c2fbd4533c9d87179" Jan 26 17:25:39 crc kubenswrapper[4865]: E0126 17:25:39.269685 4865 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"cb69513b9ad3ff0c16e744128aaffedcb7a271f64e4c187c2fbd4533c9d87179\": container with ID starting with cb69513b9ad3ff0c16e744128aaffedcb7a271f64e4c187c2fbd4533c9d87179 not found: ID does not exist" containerID="cb69513b9ad3ff0c16e744128aaffedcb7a271f64e4c187c2fbd4533c9d87179" Jan 26 17:25:39 crc kubenswrapper[4865]: I0126 17:25:39.269739 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb69513b9ad3ff0c16e744128aaffedcb7a271f64e4c187c2fbd4533c9d87179"} err="failed to get container status \"cb69513b9ad3ff0c16e744128aaffedcb7a271f64e4c187c2fbd4533c9d87179\": rpc error: code = NotFound desc = could not find container \"cb69513b9ad3ff0c16e744128aaffedcb7a271f64e4c187c2fbd4533c9d87179\": container with ID starting with cb69513b9ad3ff0c16e744128aaffedcb7a271f64e4c187c2fbd4533c9d87179 not found: ID does not exist" Jan 26 17:25:39 crc kubenswrapper[4865]: I0126 17:25:39.277948 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-s9hg2" Jan 26 17:25:39 crc kubenswrapper[4865]: I0126 17:25:39.297751 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4tnhs\" (UniqueName: \"kubernetes.io/projected/4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1-kube-api-access-4tnhs\") on node \"crc\" DevicePath \"\"" Jan 26 17:25:39 crc kubenswrapper[4865]: I0126 17:25:39.297792 4865 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 17:25:39 crc kubenswrapper[4865]: I0126 17:25:39.297806 4865 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 17:25:39 crc kubenswrapper[4865]: I0126 17:25:39.323775 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-s9hg2" Jan 26 17:25:39 crc kubenswrapper[4865]: I0126 17:25:39.507461 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-fs7x2"] Jan 26 17:25:39 crc kubenswrapper[4865]: I0126 17:25:39.516341 4865 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-fs7x2"] Jan 26 17:25:40 crc kubenswrapper[4865]: I0126 17:25:40.369152 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1" path="/var/lib/kubelet/pods/4815b73b-f1f0-40d3-aa6e-cf8e2dc9f5d1/volumes" Jan 26 17:25:41 crc kubenswrapper[4865]: I0126 17:25:41.035622 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/root-account-create-update-x64cv"] Jan 26 17:25:41 crc kubenswrapper[4865]: I0126 17:25:41.041862 4865 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/root-account-create-update-x64cv"] Jan 26 17:25:41 crc kubenswrapper[4865]: I0126 17:25:41.131731 4865 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-s9hg2"] Jan 26 17:25:41 crc kubenswrapper[4865]: I0126 17:25:41.170303 4865 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-ssm94" Jan 26 17:25:41 crc kubenswrapper[4865]: I0126 17:25:41.231932 4865 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/redhat-operators-ssm94" Jan 26 17:25:41 crc kubenswrapper[4865]: I0126 17:25:41.512435 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-sjpcq"] Jan 26 17:25:41 crc kubenswrapper[4865]: I0126 17:25:41.512771 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-sjpcq" podUID="97ff386d-ae5d-42d7-9e8f-60c2d88716bd" containerName="registry-server" containerID="cri-o://9d011b80e86ad9f6c9b5e061ca29dda26f44c696bb73b34e5133031c975f5e53" gracePeriod=2 Jan 26 17:25:41 crc kubenswrapper[4865]: E0126 17:25:41.892875 4865 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 9d011b80e86ad9f6c9b5e061ca29dda26f44c696bb73b34e5133031c975f5e53 is running failed: container process not found" containerID="9d011b80e86ad9f6c9b5e061ca29dda26f44c696bb73b34e5133031c975f5e53" cmd=["grpc_health_probe","-addr=:50051"] Jan 26 17:25:41 crc kubenswrapper[4865]: E0126 17:25:41.893377 4865 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 9d011b80e86ad9f6c9b5e061ca29dda26f44c696bb73b34e5133031c975f5e53 is running failed: container process not found" containerID="9d011b80e86ad9f6c9b5e061ca29dda26f44c696bb73b34e5133031c975f5e53" cmd=["grpc_health_probe","-addr=:50051"] Jan 26 17:25:41 crc kubenswrapper[4865]: E0126 17:25:41.893726 4865 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 9d011b80e86ad9f6c9b5e061ca29dda26f44c696bb73b34e5133031c975f5e53 is running failed: container process not found" containerID="9d011b80e86ad9f6c9b5e061ca29dda26f44c696bb73b34e5133031c975f5e53" cmd=["grpc_health_probe","-addr=:50051"] Jan 26 17:25:41 crc kubenswrapper[4865]: E0126 17:25:41.893781 4865 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 9d011b80e86ad9f6c9b5e061ca29dda26f44c696bb73b34e5133031c975f5e53 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-sjpcq" podUID="97ff386d-ae5d-42d7-9e8f-60c2d88716bd" containerName="registry-server" Jan 26 17:25:41 crc kubenswrapper[4865]: I0126 17:25:41.988456 4865 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-sjpcq" Jan 26 17:25:42 crc kubenswrapper[4865]: I0126 17:25:42.179596 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mmdsn\" (UniqueName: \"kubernetes.io/projected/97ff386d-ae5d-42d7-9e8f-60c2d88716bd-kube-api-access-mmdsn\") pod \"97ff386d-ae5d-42d7-9e8f-60c2d88716bd\" (UID: \"97ff386d-ae5d-42d7-9e8f-60c2d88716bd\") " Jan 26 17:25:42 crc kubenswrapper[4865]: I0126 17:25:42.179729 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97ff386d-ae5d-42d7-9e8f-60c2d88716bd-catalog-content\") pod \"97ff386d-ae5d-42d7-9e8f-60c2d88716bd\" (UID: \"97ff386d-ae5d-42d7-9e8f-60c2d88716bd\") " Jan 26 17:25:42 crc kubenswrapper[4865]: I0126 17:25:42.179812 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97ff386d-ae5d-42d7-9e8f-60c2d88716bd-utilities\") pod \"97ff386d-ae5d-42d7-9e8f-60c2d88716bd\" (UID: \"97ff386d-ae5d-42d7-9e8f-60c2d88716bd\") " Jan 26 17:25:42 crc kubenswrapper[4865]: I0126 17:25:42.180516 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/97ff386d-ae5d-42d7-9e8f-60c2d88716bd-utilities" (OuterVolumeSpecName: "utilities") pod "97ff386d-ae5d-42d7-9e8f-60c2d88716bd" (UID: "97ff386d-ae5d-42d7-9e8f-60c2d88716bd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 17:25:42 crc kubenswrapper[4865]: I0126 17:25:42.185232 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/97ff386d-ae5d-42d7-9e8f-60c2d88716bd-kube-api-access-mmdsn" (OuterVolumeSpecName: "kube-api-access-mmdsn") pod "97ff386d-ae5d-42d7-9e8f-60c2d88716bd" (UID: "97ff386d-ae5d-42d7-9e8f-60c2d88716bd"). InnerVolumeSpecName "kube-api-access-mmdsn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 17:25:42 crc kubenswrapper[4865]: I0126 17:25:42.196104 4865 generic.go:334] "Generic (PLEG): container finished" podID="97ff386d-ae5d-42d7-9e8f-60c2d88716bd" containerID="9d011b80e86ad9f6c9b5e061ca29dda26f44c696bb73b34e5133031c975f5e53" exitCode=0 Jan 26 17:25:42 crc kubenswrapper[4865]: I0126 17:25:42.196143 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sjpcq" event={"ID":"97ff386d-ae5d-42d7-9e8f-60c2d88716bd","Type":"ContainerDied","Data":"9d011b80e86ad9f6c9b5e061ca29dda26f44c696bb73b34e5133031c975f5e53"} Jan 26 17:25:42 crc kubenswrapper[4865]: I0126 17:25:42.196168 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sjpcq" event={"ID":"97ff386d-ae5d-42d7-9e8f-60c2d88716bd","Type":"ContainerDied","Data":"d4629acaa149058f5a8cc6e405c4f18336adb61a336cfd859eb9563fa99a121f"} Jan 26 17:25:42 crc kubenswrapper[4865]: I0126 17:25:42.196184 4865 scope.go:117] "RemoveContainer" containerID="9d011b80e86ad9f6c9b5e061ca29dda26f44c696bb73b34e5133031c975f5e53" Jan 26 17:25:42 crc kubenswrapper[4865]: I0126 17:25:42.196320 4865 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-sjpcq" Jan 26 17:25:42 crc kubenswrapper[4865]: I0126 17:25:42.243943 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/97ff386d-ae5d-42d7-9e8f-60c2d88716bd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "97ff386d-ae5d-42d7-9e8f-60c2d88716bd" (UID: "97ff386d-ae5d-42d7-9e8f-60c2d88716bd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 17:25:42 crc kubenswrapper[4865]: I0126 17:25:42.251173 4865 scope.go:117] "RemoveContainer" containerID="6e8b4e1f06fb959680e450d6d8cab5dafdd8fb338d6252ccbf385d12a8f12f89" Jan 26 17:25:42 crc kubenswrapper[4865]: I0126 17:25:42.281755 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mmdsn\" (UniqueName: \"kubernetes.io/projected/97ff386d-ae5d-42d7-9e8f-60c2d88716bd-kube-api-access-mmdsn\") on node \"crc\" DevicePath \"\"" Jan 26 17:25:42 crc kubenswrapper[4865]: I0126 17:25:42.281792 4865 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97ff386d-ae5d-42d7-9e8f-60c2d88716bd-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 17:25:42 crc kubenswrapper[4865]: I0126 17:25:42.281801 4865 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97ff386d-ae5d-42d7-9e8f-60c2d88716bd-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 17:25:42 crc kubenswrapper[4865]: I0126 17:25:42.289931 4865 scope.go:117] "RemoveContainer" containerID="9472e67df51b41aa03dfe2dc0dc0bfb7375352725604e64d41d37df6a47219da" Jan 26 17:25:42 crc kubenswrapper[4865]: I0126 17:25:42.324384 4865 scope.go:117] "RemoveContainer" containerID="9d011b80e86ad9f6c9b5e061ca29dda26f44c696bb73b34e5133031c975f5e53" Jan 26 17:25:42 crc kubenswrapper[4865]: E0126 17:25:42.325210 4865 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9d011b80e86ad9f6c9b5e061ca29dda26f44c696bb73b34e5133031c975f5e53\": container with ID starting with 9d011b80e86ad9f6c9b5e061ca29dda26f44c696bb73b34e5133031c975f5e53 not found: ID does not exist" containerID="9d011b80e86ad9f6c9b5e061ca29dda26f44c696bb73b34e5133031c975f5e53" Jan 26 17:25:42 crc kubenswrapper[4865]: I0126 17:25:42.325265 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d011b80e86ad9f6c9b5e061ca29dda26f44c696bb73b34e5133031c975f5e53"} err="failed to get container status \"9d011b80e86ad9f6c9b5e061ca29dda26f44c696bb73b34e5133031c975f5e53\": rpc error: code = NotFound desc = could not find container \"9d011b80e86ad9f6c9b5e061ca29dda26f44c696bb73b34e5133031c975f5e53\": container with ID starting with 9d011b80e86ad9f6c9b5e061ca29dda26f44c696bb73b34e5133031c975f5e53 not found: ID does not exist" Jan 26 17:25:42 crc kubenswrapper[4865]: I0126 17:25:42.325288 4865 scope.go:117] "RemoveContainer" containerID="6e8b4e1f06fb959680e450d6d8cab5dafdd8fb338d6252ccbf385d12a8f12f89" Jan 26 17:25:42 crc kubenswrapper[4865]: E0126 17:25:42.325545 4865 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e8b4e1f06fb959680e450d6d8cab5dafdd8fb338d6252ccbf385d12a8f12f89\": container with ID starting with 6e8b4e1f06fb959680e450d6d8cab5dafdd8fb338d6252ccbf385d12a8f12f89 not found: ID does not exist" 
containerID="6e8b4e1f06fb959680e450d6d8cab5dafdd8fb338d6252ccbf385d12a8f12f89" Jan 26 17:25:42 crc kubenswrapper[4865]: I0126 17:25:42.325586 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e8b4e1f06fb959680e450d6d8cab5dafdd8fb338d6252ccbf385d12a8f12f89"} err="failed to get container status \"6e8b4e1f06fb959680e450d6d8cab5dafdd8fb338d6252ccbf385d12a8f12f89\": rpc error: code = NotFound desc = could not find container \"6e8b4e1f06fb959680e450d6d8cab5dafdd8fb338d6252ccbf385d12a8f12f89\": container with ID starting with 6e8b4e1f06fb959680e450d6d8cab5dafdd8fb338d6252ccbf385d12a8f12f89 not found: ID does not exist" Jan 26 17:25:42 crc kubenswrapper[4865]: I0126 17:25:42.325602 4865 scope.go:117] "RemoveContainer" containerID="9472e67df51b41aa03dfe2dc0dc0bfb7375352725604e64d41d37df6a47219da" Jan 26 17:25:42 crc kubenswrapper[4865]: E0126 17:25:42.328447 4865 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9472e67df51b41aa03dfe2dc0dc0bfb7375352725604e64d41d37df6a47219da\": container with ID starting with 9472e67df51b41aa03dfe2dc0dc0bfb7375352725604e64d41d37df6a47219da not found: ID does not exist" containerID="9472e67df51b41aa03dfe2dc0dc0bfb7375352725604e64d41d37df6a47219da" Jan 26 17:25:42 crc kubenswrapper[4865]: I0126 17:25:42.328475 4865 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9472e67df51b41aa03dfe2dc0dc0bfb7375352725604e64d41d37df6a47219da"} err="failed to get container status \"9472e67df51b41aa03dfe2dc0dc0bfb7375352725604e64d41d37df6a47219da\": rpc error: code = NotFound desc = could not find container \"9472e67df51b41aa03dfe2dc0dc0bfb7375352725604e64d41d37df6a47219da\": container with ID starting with 9472e67df51b41aa03dfe2dc0dc0bfb7375352725604e64d41d37df6a47219da not found: ID does not exist" Jan 26 17:25:42 crc kubenswrapper[4865]: I0126 17:25:42.367561 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c738f9ed-ff61-4d5f-84e8-a7fc9936423a" path="/var/lib/kubelet/pods/c738f9ed-ff61-4d5f-84e8-a7fc9936423a/volumes" Jan 26 17:25:42 crc kubenswrapper[4865]: I0126 17:25:42.519577 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-sjpcq"] Jan 26 17:25:42 crc kubenswrapper[4865]: I0126 17:25:42.526846 4865 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-sjpcq"] Jan 26 17:25:43 crc kubenswrapper[4865]: I0126 17:25:43.911562 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ssm94"] Jan 26 17:25:43 crc kubenswrapper[4865]: I0126 17:25:43.912171 4865 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-ssm94" podUID="7566c48c-5ed7-4e63-a0ff-59af87792dfd" containerName="registry-server" containerID="cri-o://94b5dc8c17ef863a44011d3eaad0522af50684db2c1b33bccf41cc9af085f9d2" gracePeriod=2 Jan 26 17:25:44 crc kubenswrapper[4865]: I0126 17:25:44.213734 4865 generic.go:334] "Generic (PLEG): container finished" podID="7566c48c-5ed7-4e63-a0ff-59af87792dfd" containerID="94b5dc8c17ef863a44011d3eaad0522af50684db2c1b33bccf41cc9af085f9d2" exitCode=0 Jan 26 17:25:44 crc kubenswrapper[4865]: I0126 17:25:44.213787 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ssm94" 
event={"ID":"7566c48c-5ed7-4e63-a0ff-59af87792dfd","Type":"ContainerDied","Data":"94b5dc8c17ef863a44011d3eaad0522af50684db2c1b33bccf41cc9af085f9d2"} Jan 26 17:25:44 crc kubenswrapper[4865]: I0126 17:25:44.366823 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="97ff386d-ae5d-42d7-9e8f-60c2d88716bd" path="/var/lib/kubelet/pods/97ff386d-ae5d-42d7-9e8f-60c2d88716bd/volumes" Jan 26 17:25:44 crc kubenswrapper[4865]: I0126 17:25:44.963842 4865 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ssm94" Jan 26 17:25:45 crc kubenswrapper[4865]: I0126 17:25:45.128115 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7566c48c-5ed7-4e63-a0ff-59af87792dfd-utilities\") pod \"7566c48c-5ed7-4e63-a0ff-59af87792dfd\" (UID: \"7566c48c-5ed7-4e63-a0ff-59af87792dfd\") " Jan 26 17:25:45 crc kubenswrapper[4865]: I0126 17:25:45.128173 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-24x6x\" (UniqueName: \"kubernetes.io/projected/7566c48c-5ed7-4e63-a0ff-59af87792dfd-kube-api-access-24x6x\") pod \"7566c48c-5ed7-4e63-a0ff-59af87792dfd\" (UID: \"7566c48c-5ed7-4e63-a0ff-59af87792dfd\") " Jan 26 17:25:45 crc kubenswrapper[4865]: I0126 17:25:45.128294 4865 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7566c48c-5ed7-4e63-a0ff-59af87792dfd-catalog-content\") pod \"7566c48c-5ed7-4e63-a0ff-59af87792dfd\" (UID: \"7566c48c-5ed7-4e63-a0ff-59af87792dfd\") " Jan 26 17:25:45 crc kubenswrapper[4865]: I0126 17:25:45.129725 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7566c48c-5ed7-4e63-a0ff-59af87792dfd-utilities" (OuterVolumeSpecName: "utilities") pod "7566c48c-5ed7-4e63-a0ff-59af87792dfd" (UID: "7566c48c-5ed7-4e63-a0ff-59af87792dfd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 17:25:45 crc kubenswrapper[4865]: I0126 17:25:45.140274 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7566c48c-5ed7-4e63-a0ff-59af87792dfd-kube-api-access-24x6x" (OuterVolumeSpecName: "kube-api-access-24x6x") pod "7566c48c-5ed7-4e63-a0ff-59af87792dfd" (UID: "7566c48c-5ed7-4e63-a0ff-59af87792dfd"). InnerVolumeSpecName "kube-api-access-24x6x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 17:25:45 crc kubenswrapper[4865]: I0126 17:25:45.224199 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ssm94" event={"ID":"7566c48c-5ed7-4e63-a0ff-59af87792dfd","Type":"ContainerDied","Data":"b884fa0de9c6597cd659e67f1833cbc262c82aa3b4f6494550a8afd168c4f6c5"} Jan 26 17:25:45 crc kubenswrapper[4865]: I0126 17:25:45.224270 4865 scope.go:117] "RemoveContainer" containerID="94b5dc8c17ef863a44011d3eaad0522af50684db2c1b33bccf41cc9af085f9d2" Jan 26 17:25:45 crc kubenswrapper[4865]: I0126 17:25:45.224277 4865 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-ssm94" Jan 26 17:25:45 crc kubenswrapper[4865]: I0126 17:25:45.231280 4865 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7566c48c-5ed7-4e63-a0ff-59af87792dfd-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 17:25:45 crc kubenswrapper[4865]: I0126 17:25:45.231329 4865 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-24x6x\" (UniqueName: \"kubernetes.io/projected/7566c48c-5ed7-4e63-a0ff-59af87792dfd-kube-api-access-24x6x\") on node \"crc\" DevicePath \"\"" Jan 26 17:25:45 crc kubenswrapper[4865]: I0126 17:25:45.249279 4865 scope.go:117] "RemoveContainer" containerID="740510c0bc68cb4aa212693c5adeccae9a0a50e204fd7ae5d1452ebf81881a76" Jan 26 17:25:45 crc kubenswrapper[4865]: I0126 17:25:45.262170 4865 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7566c48c-5ed7-4e63-a0ff-59af87792dfd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7566c48c-5ed7-4e63-a0ff-59af87792dfd" (UID: "7566c48c-5ed7-4e63-a0ff-59af87792dfd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 17:25:45 crc kubenswrapper[4865]: I0126 17:25:45.272003 4865 scope.go:117] "RemoveContainer" containerID="dc850f2f25f83739dfdac08aef6437765f329cdbe238f7adda5ace2b26aa37aa" Jan 26 17:25:45 crc kubenswrapper[4865]: I0126 17:25:45.333248 4865 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7566c48c-5ed7-4e63-a0ff-59af87792dfd-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 17:25:45 crc kubenswrapper[4865]: I0126 17:25:45.553613 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ssm94"] Jan 26 17:25:45 crc kubenswrapper[4865]: I0126 17:25:45.560822 4865 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-ssm94"] Jan 26 17:25:46 crc kubenswrapper[4865]: I0126 17:25:46.367317 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7566c48c-5ed7-4e63-a0ff-59af87792dfd" path="/var/lib/kubelet/pods/7566c48c-5ed7-4e63-a0ff-59af87792dfd/volumes" Jan 26 17:25:50 crc kubenswrapper[4865]: I0126 17:25:50.357702 4865 scope.go:117] "RemoveContainer" containerID="f9020274f50205ff9b604c78618afd92d93eda287ad00453f536fe9f4e5fcbd5" Jan 26 17:25:50 crc kubenswrapper[4865]: E0126 17:25:50.358382 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q8cb9_openshift-machine-config-operator(0ddedab5-2528-4881-9251-9ba5334aea61)\"" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" Jan 26 17:26:02 crc kubenswrapper[4865]: I0126 17:26:02.359938 4865 scope.go:117] "RemoveContainer" containerID="f9020274f50205ff9b604c78618afd92d93eda287ad00453f536fe9f4e5fcbd5" Jan 26 17:26:02 crc kubenswrapper[4865]: E0126 17:26:02.360646 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q8cb9_openshift-machine-config-operator(0ddedab5-2528-4881-9251-9ba5334aea61)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" Jan 26 17:26:06 crc kubenswrapper[4865]: I0126 17:26:06.053135 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/keystone-db-sync-zz8bs"] Jan 26 17:26:06 crc kubenswrapper[4865]: I0126 17:26:06.059202 4865 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/keystone-db-sync-zz8bs"] Jan 26 17:26:06 crc kubenswrapper[4865]: I0126 17:26:06.367013 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="241e06d3-d93d-4495-92db-79f641de0bdc" path="/var/lib/kubelet/pods/241e06d3-d93d-4495-92db-79f641de0bdc/volumes" Jan 26 17:26:14 crc kubenswrapper[4865]: I0126 17:26:14.369654 4865 scope.go:117] "RemoveContainer" containerID="f9020274f50205ff9b604c78618afd92d93eda287ad00453f536fe9f4e5fcbd5" Jan 26 17:26:14 crc kubenswrapper[4865]: E0126 17:26:14.370623 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q8cb9_openshift-machine-config-operator(0ddedab5-2528-4881-9251-9ba5334aea61)\"" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" Jan 26 17:26:17 crc kubenswrapper[4865]: I0126 17:26:17.025766 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/placement-db-sync-v8qx4"] Jan 26 17:26:17 crc kubenswrapper[4865]: I0126 17:26:17.036316 4865 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/placement-db-sync-v8qx4"] Jan 26 17:26:18 crc kubenswrapper[4865]: I0126 17:26:18.367524 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d1754996-361d-43dd-bb98-c8f8cc9875e2" path="/var/lib/kubelet/pods/d1754996-361d-43dd-bb98-c8f8cc9875e2/volumes" Jan 26 17:26:22 crc kubenswrapper[4865]: I0126 17:26:22.028846 4865 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["nova-kuttl-default/keystone-bootstrap-db774"] Jan 26 17:26:22 crc kubenswrapper[4865]: I0126 17:26:22.034485 4865 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["nova-kuttl-default/keystone-bootstrap-db774"] Jan 26 17:26:22 crc kubenswrapper[4865]: I0126 17:26:22.369841 4865 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="97f5260e-bc1b-4e6f-8bee-8c9054039c3b" path="/var/lib/kubelet/pods/97f5260e-bc1b-4e6f-8bee-8c9054039c3b/volumes" Jan 26 17:26:26 crc kubenswrapper[4865]: I0126 17:26:26.359086 4865 scope.go:117] "RemoveContainer" containerID="f9020274f50205ff9b604c78618afd92d93eda287ad00453f536fe9f4e5fcbd5" Jan 26 17:26:26 crc kubenswrapper[4865]: E0126 17:26:26.359761 4865 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-q8cb9_openshift-machine-config-operator(0ddedab5-2528-4881-9251-9ba5334aea61)\"" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" podUID="0ddedab5-2528-4881-9251-9ba5334aea61" Jan 26 17:26:35 crc kubenswrapper[4865]: I0126 17:26:35.549041 4865 scope.go:117] "RemoveContainer" containerID="d815c3f887b492e72a3e6ec3bad4d5bb8a5cb88c04b4c586fdf3454fa4306206" Jan 26 17:26:35 crc kubenswrapper[4865]: I0126 17:26:35.578117 4865 scope.go:117] "RemoveContainer" 
containerID="87fd7d654074c8075d308136c6ca88119875f289d124506f455471751dcffcb7" Jan 26 17:26:35 crc kubenswrapper[4865]: I0126 17:26:35.726802 4865 scope.go:117] "RemoveContainer" containerID="9323e059beebe11a135a60a87838224659095f2f7142b2cd99d75cb027c05a6d" Jan 26 17:26:35 crc kubenswrapper[4865]: I0126 17:26:35.760224 4865 scope.go:117] "RemoveContainer" containerID="1ede6b41f1bd525f6609866fbaa6542cd5fdfb05e3cd6acd72c73721df3a4774" Jan 26 17:26:38 crc kubenswrapper[4865]: I0126 17:26:38.359146 4865 scope.go:117] "RemoveContainer" containerID="f9020274f50205ff9b604c78618afd92d93eda287ad00453f536fe9f4e5fcbd5" Jan 26 17:26:38 crc kubenswrapper[4865]: I0126 17:26:38.632866 4865 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-q8cb9" event={"ID":"0ddedab5-2528-4881-9251-9ba5334aea61","Type":"ContainerStarted","Data":"1f0da96ad51070d8337c59ebed7246e8f87cc1343a47c72da57e7d63baf320d5"} var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515135721775024462 0ustar coreroot  Om77'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015135721776017400 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015135715617016520 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015135715617015470 5ustar corecore